__init__.py

  1. """
  2. BitBake 'Fetch' implementations
  3. Classes for obtaining upstream sources for the
  4. BitBake build tools.
  5. """
  6. # Copyright (C) 2003, 2004 Chris Larson
  7. # Copyright (C) 2012 Intel Corporation
  8. #
  9. # SPDX-License-Identifier: GPL-2.0-only
  10. #
  11. # Based on functions from the base bb module, Copyright 2003 Holger Schurig
  12. import os, re
  13. import signal
  14. import logging
  15. import urllib.request, urllib.parse, urllib.error
  16. if 'git' not in urllib.parse.uses_netloc:
  17. urllib.parse.uses_netloc.append('git')
  18. import operator
  19. import collections
  20. import subprocess
  21. import pickle
  22. import errno
  23. import bb.persist_data, bb.utils
  24. import bb.checksum
  25. import bb.process
  26. import bb.event
  27. __version__ = "2"
  28. _checksum_cache = bb.checksum.FileChecksumCache()
  29. logger = logging.getLogger("BitBake.Fetcher")
  30. CHECKSUM_LIST = [ "md5", "sha256", "sha1", "sha384", "sha512" ]
  31. SHOWN_CHECKSUM_LIST = ["sha256"]
  32. class BBFetchException(Exception):
  33. """Class all fetch exceptions inherit from"""
  34. def __init__(self, message):
  35. self.msg = message
  36. Exception.__init__(self, message)
  37. def __str__(self):
  38. return self.msg
  39. class UntrustedUrl(BBFetchException):
  40. """Exception raised when encountering a host not listed in BB_ALLOWED_NETWORKS"""
  41. def __init__(self, url, message=''):
  42. if message:
  43. msg = message
  44. else:
  45. msg = "The URL: '%s' is not trusted and cannot be used" % url
  46. self.url = url
  47. BBFetchException.__init__(self, msg)
  48. self.args = (url,)
  49. class MalformedUrl(BBFetchException):
  50. """Exception raised when encountering an invalid url"""
  51. def __init__(self, url, message=''):
  52. if message:
  53. msg = message
  54. else:
  55. msg = "The URL: '%s' is invalid and cannot be interpreted" % url
  56. self.url = url
  57. BBFetchException.__init__(self, msg)
  58. self.args = (url,)
  59. class FetchError(BBFetchException):
  60. """General fetcher exception when something happens incorrectly"""
  61. def __init__(self, message, url = None):
  62. if url:
  63. msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
  64. else:
  65. msg = "Fetcher failure: %s" % message
  66. self.url = url
  67. BBFetchException.__init__(self, msg)
  68. self.args = (message, url)
  69. class ChecksumError(FetchError):
  70. """Exception when mismatched checksum encountered"""
  71. def __init__(self, message, url = None, checksum = None):
  72. self.checksum = checksum
  73. FetchError.__init__(self, message, url)
  74. class NoChecksumError(FetchError):
  75. """Exception when no checksum is specified, but BB_STRICT_CHECKSUM is set"""
  76. class UnpackError(BBFetchException):
  77. """General fetcher exception when something happens incorrectly when unpacking"""
  78. def __init__(self, message, url):
  79. msg = "Unpack failure for URL: '%s'. %s" % (url, message)
  80. self.url = url
  81. BBFetchException.__init__(self, msg)
  82. self.args = (message, url)
  83. class NoMethodError(BBFetchException):
  84. """Exception raised when there is no method to obtain a supplied url or set of urls"""
  85. def __init__(self, url):
  86. msg = "Could not find a fetcher which supports the URL: '%s'" % url
  87. self.url = url
  88. BBFetchException.__init__(self, msg)
  89. self.args = (url,)
  90. class MissingParameterError(BBFetchException):
  91. """Exception raised when a fetch method is missing a critical parameter in the url"""
  92. def __init__(self, missing, url):
  93. msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
  94. self.url = url
  95. self.missing = missing
  96. BBFetchException.__init__(self, msg)
  97. self.args = (missing, url)
  98. class ParameterError(BBFetchException):
  99. """Exception raised when a url cannot be processed due to invalid parameters."""
  100. def __init__(self, message, url):
  101. msg = "URL: '%s' has invalid parameters. %s" % (url, message)
  102. self.url = url
  103. BBFetchException.__init__(self, msg)
  104. self.args = (message, url)
  105. class NetworkAccess(BBFetchException):
  106. """Exception raised when network access is disabled but it is required."""
  107. def __init__(self, url, cmd):
  108. msg = "Network access disabled through BB_NO_NETWORK (or set indirectly due to use of BB_FETCH_PREMIRRORONLY) but access requested with command %s (for url %s)" % (cmd, url)
  109. self.url = url
  110. self.cmd = cmd
  111. BBFetchException.__init__(self, msg)
  112. self.args = (url, cmd)
  113. class NonLocalMethod(Exception):
  114. def __init__(self):
  115. Exception.__init__(self)
  116. class MissingChecksumEvent(bb.event.Event):
  117. def __init__(self, url, **checksums):
  118. self.url = url
  119. self.checksums = checksums
  120. bb.event.Event.__init__(self)
  121. class URI(object):
  122. """
  123. A class representing a generic URI, with methods for
  124. accessing the URI components, and stringifies to the
  125. URI.
  126. It is constructed by calling it with a URI, or setting
  127. the attributes manually:
  128. uri = URI("http://example.com/")
  129. uri = URI()
  130. uri.scheme = 'http'
  131. uri.hostname = 'example.com'
  132. uri.path = '/'
  133. It has the following attributes:
  134. * scheme (read/write)
  135. * userinfo (authentication information) (read/write)
  136. * username (read/write)
  137. * password (read/write)
  138. Note, password is deprecated as of RFC 3986.
  139. * hostname (read/write)
  140. * port (read/write)
  141. * hostport (read only)
  142. "hostname:port", if both are set, otherwise just "hostname"
  143. * path (read/write)
  144. * path_quoted (read/write)
  145. A URI quoted version of path
  146. * params (dict) (read/write)
  147. * query (dict) (read/write)
  148. * relative (bool) (read only)
  149. True if this is a "relative URI", (e.g. file:foo.diff)
  150. It stringifies to the URI itself.
  151. Some notes about relative URIs: while it's specified that
  152. a URI beginning with <scheme>:// should either be directly
  153. followed by a hostname or a /, the old URI handling of the
  154. fetch2 library did not conform to this. Therefore, this URI
  155. class has some kludges to make sure that URIs are parsed in
  156. a way conforming to bitbake's current usage. This URI class
  157. supports the following:
  158. file:relative/path.diff (IETF compliant)
  159. git:relative/path.git (IETF compliant)
  160. git:///absolute/path.git (IETF compliant)
  161. file:///absolute/path.diff (IETF compliant)
  162. file://relative/path.diff (not IETF compliant)
  163. But it does not support the following:
  164. file://hostname/absolute/path.diff (would be IETF compliant)
  165. Note that the last case only applies to a list of
  166. explicitly allowed schemes (currently only file://), which are
  167. required not to have a network location.
  168. """
  169. _relative_schemes = ['file', 'git']
  170. _netloc_forbidden = ['file']
  171. def __init__(self, uri=None):
  172. self.scheme = ''
  173. self.userinfo = ''
  174. self.hostname = ''
  175. self.port = None
  176. self._path = ''
  177. self.params = {}
  178. self.query = {}
  179. self.relative = False
  180. if not uri:
  181. return
  182. # We hijack the URL parameters, since the way bitbake uses
  183. # them is not quite RFC compliant.
  184. uri, param_str = (uri.split(";", 1) + [None])[:2]
  185. urlp = urllib.parse.urlparse(uri)
  186. self.scheme = urlp.scheme
  187. reparse = 0
  188. # Coerce urlparse to make URI scheme use netloc
  189. if not self.scheme in urllib.parse.uses_netloc:
  190. urllib.parse.uses_netloc.append(self.scheme)
  191. reparse = 1
  192. # Make urlparse happy(/ier) by converting local resources
  193. # to RFC compliant URL format. E.g.:
  194. # file://foo.diff -> file:foo.diff
  195. if urlp.scheme in self._netloc_forbidden:
  196. uri = re.sub("(?<=:)//(?!/)", "", uri, 1)
  197. reparse = 1
  198. if reparse:
  199. urlp = urllib.parse.urlparse(uri)
  200. # Identify if the URI is relative or not
  201. if urlp.scheme in self._relative_schemes and \
  202. re.compile(r"^\w+:(?!//)").match(uri):
  203. self.relative = True
  204. if not self.relative:
  205. self.hostname = urlp.hostname or ''
  206. self.port = urlp.port
  207. self.userinfo += urlp.username or ''
  208. if urlp.password:
  209. self.userinfo += ':%s' % urlp.password
  210. self.path = urllib.parse.unquote(urlp.path)
  211. if param_str:
  212. self.params = self._param_str_split(param_str, ";")
  213. if urlp.query:
  214. self.query = self._param_str_split(urlp.query, "&")
  215. def __str__(self):
  216. userinfo = self.userinfo
  217. if userinfo:
  218. userinfo += '@'
  219. return "%s:%s%s%s%s%s%s" % (
  220. self.scheme,
  221. '' if self.relative else '//',
  222. userinfo,
  223. self.hostport,
  224. self.path_quoted,
  225. self._query_str(),
  226. self._param_str())
  227. def _param_str(self):
  228. return (
  229. ''.join([';', self._param_str_join(self.params, ";")])
  230. if self.params else '')
  231. def _query_str(self):
  232. return (
  233. ''.join(['?', self._param_str_join(self.query, "&")])
  234. if self.query else '')
  235. def _param_str_split(self, string, elmdelim, kvdelim="="):
  236. ret = collections.OrderedDict()
  237. for k, v in [x.split(kvdelim, 1) for x in string.split(elmdelim) if x]:
  238. ret[k] = v
  239. return ret
  240. def _param_str_join(self, dict_, elmdelim, kvdelim="="):
  241. return elmdelim.join([kvdelim.join([k, v]) for k, v in dict_.items()])
  242. @property
  243. def hostport(self):
  244. if not self.port:
  245. return self.hostname
  246. return "%s:%d" % (self.hostname, self.port)
  247. @property
  248. def path_quoted(self):
  249. return urllib.parse.quote(self.path)
  250. @path_quoted.setter
  251. def path_quoted(self, path):
  252. self.path = urllib.parse.unquote(path)
  253. @property
  254. def path(self):
  255. return self._path
  256. @path.setter
  257. def path(self, path):
  258. self._path = path
  259. if not path or re.compile("^/").match(path):
  260. self.relative = False
  261. else:
  262. self.relative = True
  263. @property
  264. def username(self):
  265. if self.userinfo:
  266. return (self.userinfo.split(":", 1))[0]
  267. return ''
  268. @username.setter
  269. def username(self, username):
  270. password = self.password
  271. self.userinfo = username
  272. if password:
  273. self.userinfo += ":%s" % password
  274. @property
  275. def password(self):
  276. if self.userinfo and ":" in self.userinfo:
  277. return (self.userinfo.split(":", 1))[1]
  278. return ''
  279. @password.setter
  280. def password(self, password):
  281. self.userinfo = "%s:%s" % (self.username, password)
  282. def decodeurl(url):
  283. """Decodes an URL into the tokens (scheme, network location, path,
  284. user, password, parameters).
  285. """
  286. m = re.compile('(?P<type>[^:]*)://((?P<user>[^/;]+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
  287. if not m:
  288. raise MalformedUrl(url)
  289. type = m.group('type')
  290. location = m.group('location')
  291. if not location:
  292. raise MalformedUrl(url)
  293. user = m.group('user')
  294. parm = m.group('parm')
  295. locidx = location.find('/')
  296. if locidx != -1 and type.lower() != 'file':
  297. host = location[:locidx]
  298. path = location[locidx:]
  299. elif type.lower() == 'file':
  300. host = ""
  301. path = location
  302. else:
  303. host = location
  304. path = "/"
  305. if user:
  306. m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
  307. if m:
  308. user = m.group('user')
  309. pswd = m.group('pswd')
  310. else:
  311. user = ''
  312. pswd = ''
  313. p = collections.OrderedDict()
  314. if parm:
  315. for s in parm.split(';'):
  316. if s:
  317. if not '=' in s:
  318. raise MalformedUrl(url, "The URL: '%s' is invalid: parameter %s does not specify a value (missing '=')" % (url, s))
  319. s1, s2 = s.split('=', 1)
  320. p[s1] = s2
  321. return type, host, urllib.parse.unquote(path), user, pswd, p
  322. def encodeurl(decoded):
  323. """Encodes a URL from tokens (scheme, network location, path,
  324. user, password, parameters).
  325. """
  326. type, host, path, user, pswd, p = decoded
  327. if not type:
  328. raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
  329. url = ['%s://' % type]
  330. if user and type != "file":
  331. url.append("%s" % user)
  332. if pswd:
  333. url.append(":%s" % pswd)
  334. url.append("@")
  335. if host and type != "file":
  336. url.append("%s" % host)
  337. if path:
  338. # Standardise path to ensure comparisons work
  339. while '//' in path:
  340. path = path.replace("//", "/")
  341. url.append("%s" % urllib.parse.quote(path))
  342. if p:
  343. for parm in p:
  344. url.append(";%s=%s" % (parm, p[parm]))
  345. return "".join(url)
  346. def uri_replace(ud, uri_find, uri_replace, replacements, d, mirrortarball=None):
  347. if not ud.url or not uri_find or not uri_replace:
  348. logger.error("uri_replace: passed an undefined value, not replacing")
  349. return None
  350. uri_decoded = list(decodeurl(ud.url))
  351. uri_find_decoded = list(decodeurl(uri_find))
  352. uri_replace_decoded = list(decodeurl(uri_replace))
  353. logger.debug2("For url %s comparing %s to %s" % (uri_decoded, uri_find_decoded, uri_replace_decoded))
  354. result_decoded = ['', '', '', '', '', {}]
  355. # 0 - type, 1 - host, 2 - path, 3 - user, 4 - pswd, 5 - params
  356. for loc, i in enumerate(uri_find_decoded):
  357. result_decoded[loc] = uri_decoded[loc]
  358. regexp = i
  359. if loc == 0 and regexp and not regexp.endswith("$"):
  360. # Leaving the type unanchored can mean "https" matching "file" can become "files"
  361. # which is clearly undesirable.
  362. regexp += "$"
  363. if loc == 5:
  364. # Handle URL parameters
  365. if i:
  366. # Any specified URL parameters must match
  367. for k in uri_find_decoded[loc]:
  368. if uri_decoded[loc][k] != uri_find_decoded[loc][k]:
  369. return None
  370. # Overwrite any specified replacement parameters
  371. for k in uri_replace_decoded[loc]:
  372. for l in replacements:
  373. uri_replace_decoded[loc][k] = uri_replace_decoded[loc][k].replace(l, replacements[l])
  374. result_decoded[loc][k] = uri_replace_decoded[loc][k]
  375. elif (loc == 3 or loc == 4) and uri_replace_decoded[loc]:
  376. # User/password in the replacement is just a straight replacement
  377. result_decoded[loc] = uri_replace_decoded[loc]
  378. elif (re.match(regexp, uri_decoded[loc])):
  379. if not uri_replace_decoded[loc]:
  380. result_decoded[loc] = ""
  381. else:
  382. for k in replacements:
  383. uri_replace_decoded[loc] = uri_replace_decoded[loc].replace(k, replacements[k])
  384. #bb.note("%s %s %s" % (regexp, uri_replace_decoded[loc], uri_decoded[loc]))
  385. result_decoded[loc] = re.sub(regexp, uri_replace_decoded[loc], uri_decoded[loc], 1)
  386. if loc == 2:
  387. # Handle path manipulations
  388. basename = None
  389. if uri_decoded[0] != uri_replace_decoded[0] and mirrortarball:
  390. # If the source and destination url types differ, must be a mirrortarball mapping
  391. basename = os.path.basename(mirrortarball)
  392. # Kill parameters, they make no sense for mirror tarballs
  393. uri_decoded[5] = {}
  394. elif ud.localpath and ud.method.supports_checksum(ud):
  395. basename = os.path.basename(ud.localpath)
  396. if basename:
  397. uri_basename = os.path.basename(uri_decoded[loc])
  398. # Prefix with a slash as a sentinel in case
  399. # result_decoded[loc] does not contain one.
  400. path = "/" + result_decoded[loc]
  401. if uri_basename and basename != uri_basename and path.endswith("/" + uri_basename):
  402. result_decoded[loc] = path[1:-len(uri_basename)] + basename
  403. elif not path.endswith("/" + basename):
  404. result_decoded[loc] = os.path.join(path[1:], basename)
  405. else:
  406. return None
  407. result = encodeurl(result_decoded)
  408. if result == ud.url:
  409. return None
  410. logger.debug2("For url %s returning %s" % (ud.url, result))
  411. return result
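# Illustrative sketch (not part of the original module): given a mirror rule such
# as
#   PREMIRRORS = "git://.*/.* http://downloads.example.com/mirror/"
# uri_replace() regex-matches each decoded field (type, host, path, user, pswd,
# params) of the original URL against the "find" URI and substitutes the
# corresponding field of the "replace" URI, so a git:// URL is redirected to the
# http:// mirror; when a mirrortarball is passed, its basename is appended to the
# rewritten path.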
  412. methods = []
  413. urldata_cache = {}
  414. saved_headrevs = {}
  415. def fetcher_init(d):
  416. """
  417. Called to initialize the fetchers once the configuration data is known.
  418. Calls before this must not hit the cache.
  419. """
  420. revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
  421. try:
  422. # fetcher_init is called multiple times, so make sure we only save the
  423. # revs the first time it is called.
  424. if not bb.fetch2.saved_headrevs:
  425. bb.fetch2.saved_headrevs = dict(revs)
  426. except:
  427. pass
  428. # When to drop SCM head revisions controlled by user policy
  429. srcrev_policy = d.getVar('BB_SRCREV_POLICY') or "clear"
  430. if srcrev_policy == "cache":
  431. logger.debug("Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
  432. elif srcrev_policy == "clear":
  433. logger.debug("Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
  434. revs.clear()
  435. else:
  436. raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
  437. _checksum_cache.init_cache(d)
  438. for m in methods:
  439. if hasattr(m, "init"):
  440. m.init(d)
  441. def fetcher_parse_save():
  442. _checksum_cache.save_extras()
  443. def fetcher_parse_done():
  444. _checksum_cache.save_merge()
  445. def fetcher_compare_revisions(d):
  446. """
  447. Compare the revisions in the persistent cache with the saved values from
  448. when bitbake was started and return true if they have changed.
  449. """
  450. headrevs = dict(bb.persist_data.persist('BB_URI_HEADREVS', d))
  451. return headrevs != bb.fetch2.saved_headrevs
  452. def mirror_from_string(data):
  453. mirrors = (data or "").replace('\\n',' ').split()
  454. # Split into pairs
  455. if len(mirrors) % 2 != 0:
  456. bb.warn('Invalid mirror data %s, should have paired members.' % data)
  457. return list(zip(*[iter(mirrors)]*2))
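# Illustrative sketch (not part of the original module): the mirror string uses
# the usual PREMIRRORS/MIRRORS layout of whitespace-separated (find, replace)
# pairs, optionally separated by literal "\n" sequences, e.g.
#
#   >>> mirror_from_string("git://.*/.* http://downloads.example.com/mirror/ \\n "
#   ...                    "https://.*/.* http://downloads.example.com/mirror/")
#   [('git://.*/.*', 'http://downloads.example.com/mirror/'),
#    ('https://.*/.*', 'http://downloads.example.com/mirror/')]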
  458. def verify_checksum(ud, d, precomputed={}):
  459. """
  460. Verify the checksums (md5, sha256, etc.) for the downloaded source.
  461. Raises a FetchError if any of the SRC_URI checksums do not match
  462. the downloaded file, or if BB_STRICT_CHECKSUM is set and there are no
  463. checksums specified.
  464. Returns a dict of checksums that can be stored in a done stamp file and
  465. passed in as precomputed parameter in a later call to avoid re-computing
  466. the checksums from the file. This allows verifying the checksums of the
  467. file against those in the recipe each time, rather than only after
  468. downloading. See https://bugzilla.yoctoproject.org/show_bug.cgi?id=5571.
  469. """
  470. if ud.ignore_checksums or not ud.method.supports_checksum(ud):
  471. return {}
  472. def compute_checksum_info(checksum_id):
  473. checksum_name = getattr(ud, "%s_name" % checksum_id)
  474. if checksum_id in precomputed:
  475. checksum_data = precomputed[checksum_id]
  476. else:
  477. checksum_data = getattr(bb.utils, "%s_file" % checksum_id)(ud.localpath)
  478. checksum_expected = getattr(ud, "%s_expected" % checksum_id)
  479. if checksum_expected == '':
  480. checksum_expected = None
  481. return {
  482. "id": checksum_id,
  483. "name": checksum_name,
  484. "data": checksum_data,
  485. "expected": checksum_expected
  486. }
  487. checksum_infos = []
  488. for checksum_id in CHECKSUM_LIST:
  489. checksum_infos.append(compute_checksum_info(checksum_id))
  490. checksum_dict = {ci["id"] : ci["data"] for ci in checksum_infos}
  491. checksum_event = {"%ssum" % ci["id"] : ci["data"] for ci in checksum_infos}
  492. for ci in checksum_infos:
  493. if ci["id"] in SHOWN_CHECKSUM_LIST:
  494. checksum_lines = ["SRC_URI[%s] = \"%s\"" % (ci["name"], ci["data"])]
  495. # If no checksum has been provided
  496. if ud.method.recommends_checksum(ud) and all(ci["expected"] is None for ci in checksum_infos):
  497. messages = []
  498. strict = d.getVar("BB_STRICT_CHECKSUM") or "0"
  499. # If strict checking enabled and neither sum defined, raise error
  500. if strict == "1":
  501. messages.append("No checksum specified for '%s', please add at " \
  502. "least one to the recipe:" % ud.localpath)
  503. messages.extend(checksum_lines)
  504. logger.error("\n".join(messages))
  505. raise NoChecksumError("Missing SRC_URI checksum", ud.url)
  506. bb.event.fire(MissingChecksumEvent(ud.url, **checksum_event), d)
  507. if strict == "ignore":
  508. return checksum_dict
  509. # Log missing sums so user can more easily add them
  510. messages.append("Missing checksum for '%s', consider adding at " \
  511. "least one to the recipe:" % ud.localpath)
  512. messages.extend(checksum_lines)
  513. logger.warning("\n".join(messages))
  514. # We want to alert the user if a checksum is defined in the recipe but
  515. # it does not match.
  516. messages = []
  517. messages.append("Checksum mismatch!")
  518. bad_checksum = None
  519. for ci in checksum_infos:
  520. if ci["expected"] and ci["expected"] != ci["data"]:
  521. messages.append("File: '%s' has %s checksum '%s' when '%s' was " \
  522. "expected" % (ud.localpath, ci["id"], ci["data"], ci["expected"]))
  523. bad_checksum = ci["data"]
  524. if bad_checksum:
  525. messages.append("If this change is expected (e.g. you have upgraded " \
  526. "to a new version without updating the checksums) " \
  527. "then you can use these lines within the recipe:")
  528. messages.extend(checksum_lines)
  529. messages.append("Otherwise you should retry the download and/or " \
  530. "check with upstream to determine if the file has " \
  531. "become corrupted or otherwise unexpectedly modified.")
  532. raise ChecksumError("\n".join(messages), ud.url, bad_checksum)
  533. return checksum_dict
  534. def verify_donestamp(ud, d, origud=None):
  535. """
  536. Check whether the done stamp file has the right checksums (if the fetch
  537. method supports them). If it doesn't, delete the done stamp and force
  538. a re-download.
  539. Returns True, if the donestamp exists and is valid, False otherwise. When
  540. returning False, any existing done stamps are removed.
  541. """
  542. if not ud.needdonestamp or (origud and not origud.needdonestamp):
  543. return True
  544. if not os.path.exists(ud.localpath):
  545. # local path does not exist
  546. if os.path.exists(ud.donestamp):
  547. # done stamp exists, but the downloaded file does not; the done stamp
  548. # must be incorrect, re-trigger the download
  549. bb.utils.remove(ud.donestamp)
  550. return False
  551. if (not ud.method.supports_checksum(ud) or
  552. (origud and not origud.method.supports_checksum(origud))):
  553. # if done stamp exists and checksums not supported; assume the local
  554. # file is current
  555. return os.path.exists(ud.donestamp)
  556. precomputed_checksums = {}
  557. # Only re-use the precomputed checksums if the donestamp is newer than the
  558. # file. Do not rely on the mtime of directories, though. If ud.localpath is
  559. # a directory, there will probably not be any checksums anyway.
  560. if os.path.exists(ud.donestamp) and (os.path.isdir(ud.localpath) or
  561. os.path.getmtime(ud.localpath) < os.path.getmtime(ud.donestamp)):
  562. try:
  563. with open(ud.donestamp, "rb") as cachefile:
  564. pickled = pickle.Unpickler(cachefile)
  565. precomputed_checksums.update(pickled.load())
  566. except Exception as e:
  567. # Avoid the warnings on the upgrade path from empty done stamp
  568. # files to those containing the checksums.
  569. if not isinstance(e, EOFError):
  570. # Ignore errors, they aren't fatal
  571. logger.warning("Couldn't load checksums from donestamp %s: %s "
  572. "(msg: %s)" % (ud.donestamp, type(e).__name__,
  573. str(e)))
  574. try:
  575. checksums = verify_checksum(ud, d, precomputed_checksums)
  576. # If the cache file did not have the checksums, compute and store them
  577. # as an upgrade path from the previous done stamp file format.
  578. if checksums != precomputed_checksums:
  579. with open(ud.donestamp, "wb") as cachefile:
  580. p = pickle.Pickler(cachefile, 2)
  581. p.dump(checksums)
  582. return True
  583. except ChecksumError as e:
  584. # Checksums failed to verify, trigger re-download and remove the
  585. # incorrect stamp file.
  586. logger.warning("Checksum mismatch for local file %s\n"
  587. "Cleaning and trying again." % ud.localpath)
  588. if os.path.exists(ud.localpath):
  589. rename_bad_checksum(ud, e.checksum)
  590. bb.utils.remove(ud.donestamp)
  591. return False
  592. def update_stamp(ud, d):
  593. """
  594. The donestamp is a stamp file indicating that the whole fetch is done.
  595. This function updates the stamp after verifying the checksum.
  596. """
  597. if not ud.needdonestamp:
  598. return
  599. if os.path.exists(ud.donestamp):
  600. # Touch the done stamp file to show active use of the download
  601. try:
  602. os.utime(ud.donestamp, None)
  603. except:
  604. # Errors aren't fatal here
  605. pass
  606. else:
  607. try:
  608. checksums = verify_checksum(ud, d)
  609. # Store the checksums for later re-verification against the recipe
  610. with open(ud.donestamp, "wb") as cachefile:
  611. p = pickle.Pickler(cachefile, 2)
  612. p.dump(checksums)
  613. except ChecksumError as e:
  614. # Checksums failed to verify, trigger re-download and remove the
  615. # incorrect stamp file.
  616. logger.warning("Checksum mismatch for local file %s\n"
  617. "Cleaning and trying again." % ud.localpath)
  618. if os.path.exists(ud.localpath):
  619. rename_bad_checksum(ud, e.checksum)
  620. bb.utils.remove(ud.donestamp)
  621. raise
  622. def subprocess_setup():
  623. # Python installs a SIGPIPE handler by default. This is usually not what
  624. # non-Python subprocesses expect.
  625. # SIGPIPE errors are known issues with gzip/bash
  626. signal.signal(signal.SIGPIPE, signal.SIG_DFL)
  627. def get_autorev(d):
  628. # Only avoid caching the source revision in the autorev case
  629. if d.getVar('BB_SRCREV_POLICY') != "cache":
  630. d.setVar('BB_DONT_CACHE', '1')
  631. return "AUTOINC"
  632. def get_srcrev(d, method_name='sortable_revision'):
  633. """
  634. Return the revision string, usually for use in the version string (PV) of the current package
  635. Most packages usually only have one SCM so we just pass on the call.
  636. In the multi SCM case, we build a value based on SRCREV_FORMAT which must
  637. have been set.
  638. The idea here is that we put the string "AUTOINC+" into the return value if the revisions are not
  639. incremental; other code is then responsible for turning that into an increasing value (if needed).
  640. A method_name can be supplied to retrieve an alternatively formatted revision from a fetcher, if
  641. that fetcher provides a method with the given name and the same signature as sortable_revision.
  642. """
  643. d.setVar("__BBSEENSRCREV", "1")
  644. recursion = d.getVar("__BBINSRCREV")
  645. if recursion:
  646. raise FetchError("There are recursive references in fetcher variables, likely through SRC_URI")
  647. d.setVar("__BBINSRCREV", True)
  648. scms = []
  649. fetcher = Fetch(d.getVar('SRC_URI').split(), d)
  650. urldata = fetcher.ud
  651. for u in urldata:
  652. if urldata[u].method.supports_srcrev():
  653. scms.append(u)
  654. if not scms:
  655. raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
  656. if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
  657. autoinc, rev = getattr(urldata[scms[0]].method, method_name)(urldata[scms[0]], d, urldata[scms[0]].names[0])
  658. if len(rev) > 10:
  659. rev = rev[:10]
  660. d.delVar("__BBINSRCREV")
  661. if autoinc:
  662. return "AUTOINC+" + rev
  663. return rev
  664. #
  665. # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
  666. #
  667. format = d.getVar('SRCREV_FORMAT')
  668. if not format:
  669. raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.\n"\
  670. "The SCMs are:\n%s" % '\n'.join(scms))
  671. name_to_rev = {}
  672. seenautoinc = False
  673. for scm in scms:
  674. ud = urldata[scm]
  675. for name in ud.names:
  676. autoinc, rev = getattr(ud.method, method_name)(ud, d, name)
  677. seenautoinc = seenautoinc or autoinc
  678. if len(rev) > 10:
  679. rev = rev[:10]
  680. name_to_rev[name] = rev
  681. # Replace names by revisions in the SRCREV_FORMAT string. The approach used
  682. # here can handle names being prefixes of other names and names appearing
  683. # as substrings in revisions (in which case the name should not be
  684. # expanded). The '|' regular expression operator tries matches from left to
  685. # right, so we need to sort the names with the longest ones first.
  686. names_descending_len = sorted(name_to_rev, key=len, reverse=True)
  687. name_to_rev_re = "|".join(re.escape(name) for name in names_descending_len)
  688. format = re.sub(name_to_rev_re, lambda match: name_to_rev[match.group(0)], format)
  689. if seenautoinc:
  690. format = "AUTOINC+" + format
  691. d.delVar("__BBINSRCREV")
  692. return format
  693. def localpath(url, d):
  694. fetcher = bb.fetch2.Fetch([url], d)
  695. return fetcher.localpath(url)
  696. # Need to export PATH as the binary could be in metadata paths
  697. # rather than host-provided ones.
  698. # Also include some other variables.
  699. FETCH_EXPORT_VARS = ['HOME', 'PATH',
  700. 'HTTP_PROXY', 'http_proxy',
  701. 'HTTPS_PROXY', 'https_proxy',
  702. 'FTP_PROXY', 'ftp_proxy',
  703. 'FTPS_PROXY', 'ftps_proxy',
  704. 'NO_PROXY', 'no_proxy',
  705. 'ALL_PROXY', 'all_proxy',
  706. 'GIT_PROXY_COMMAND',
  707. 'GIT_SSH',
  708. 'GIT_SSH_COMMAND',
  709. 'GIT_SSL_CAINFO',
  710. 'GIT_SMART_HTTP',
  711. 'SSH_AUTH_SOCK', 'SSH_AGENT_PID',
  712. 'SOCKS5_USER', 'SOCKS5_PASSWD',
  713. 'DBUS_SESSION_BUS_ADDRESS',
  714. 'P4CONFIG',
  715. 'SSL_CERT_FILE',
  716. 'AWS_PROFILE',
  717. 'AWS_ACCESS_KEY_ID',
  718. 'AWS_SECRET_ACCESS_KEY',
  719. 'AWS_DEFAULT_REGION']
  720. def get_fetcher_environment(d):
  721. newenv = {}
  722. origenv = d.getVar("BB_ORIGENV")
  723. for name in bb.fetch2.FETCH_EXPORT_VARS:
  724. value = d.getVar(name)
  725. if not value and origenv:
  726. value = origenv.getVar(name)
  727. if value:
  728. newenv[name] = value
  729. return newenv
  730. def runfetchcmd(cmd, d, quiet=False, cleanup=None, log=None, workdir=None):
  731. """
  732. Run cmd returning the command output
  733. Raise an error if interrupted or cmd fails
  734. Optionally echo command output to stdout
  735. Optionally remove the files/directories listed in cleanup upon failure
  736. """
  737. exportvars = FETCH_EXPORT_VARS
  738. if not cleanup:
  739. cleanup = []
  740. # If PATH contains WORKDIR which contains PV-PR which contains SRCPV we
  741. # can end up in circular recursion here so give the option of breaking it
  742. # in a data store copy.
  743. try:
  744. d.getVar("PV")
  745. d.getVar("PR")
  746. except bb.data_smart.ExpansionError:
  747. d = bb.data.createCopy(d)
  748. d.setVar("PV", "fetcheravoidrecurse")
  749. d.setVar("PR", "fetcheravoidrecurse")
  750. origenv = d.getVar("BB_ORIGENV", False)
  751. for var in exportvars:
  752. val = d.getVar(var) or (origenv and origenv.getVar(var))
  753. if val:
  754. cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
  755. # Disable pseudo as it may affect ssh, potentially causing it to hang.
  756. cmd = 'export PSEUDO_DISABLED=1; ' + cmd
  757. if workdir:
  758. logger.debug("Running '%s' in %s" % (cmd, workdir))
  759. else:
  760. logger.debug("Running %s", cmd)
  761. success = False
  762. error_message = ""
  763. try:
  764. (output, errors) = bb.process.run(cmd, log=log, shell=True, stderr=subprocess.PIPE, cwd=workdir)
  765. success = True
  766. except bb.process.NotFoundError as e:
  767. error_message = "Fetch command %s not found" % (e.command)
  768. except bb.process.ExecutionError as e:
  769. if e.stdout:
  770. output = "output:\n%s\n%s" % (e.stdout, e.stderr)
  771. elif e.stderr:
  772. output = "output:\n%s" % e.stderr
  773. else:
  774. output = "no output"
  775. error_message = "Fetch command %s failed with exit code %s, %s" % (e.command, e.exitcode, output)
  776. except bb.process.CmdError as e:
  777. error_message = "Fetch command %s could not be run:\n%s" % (e.command, e.msg)
  778. if not success:
  779. for f in cleanup:
  780. try:
  781. bb.utils.remove(f, True)
  782. except OSError:
  783. pass
  784. raise FetchError(error_message)
  785. return output
  786. def check_network_access(d, info, url):
  787. """
  788. log remote network access, and error if BB_NO_NETWORK is set or the given
  789. URI is untrusted
  790. """
  791. if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
  792. raise NetworkAccess(url, info)
  793. elif not trusted_network(d, url):
  794. raise UntrustedUrl(url, info)
  795. else:
  796. logger.debug("Fetcher accessed the network with the command %s" % info)
  797. def build_mirroruris(origud, mirrors, ld):
  798. uris = []
  799. uds = []
  800. replacements = {}
  801. replacements["TYPE"] = origud.type
  802. replacements["HOST"] = origud.host
  803. replacements["PATH"] = origud.path
  804. replacements["BASENAME"] = origud.path.split("/")[-1]
  805. replacements["MIRRORNAME"] = origud.host.replace(':','.') + origud.path.replace('/', '.').replace('*', '.')
  806. def adduri(ud, uris, uds, mirrors, tarballs):
  807. for line in mirrors:
  808. try:
  809. (find, replace) = line
  810. except ValueError:
  811. continue
  812. for tarball in tarballs:
  813. newuri = uri_replace(ud, find, replace, replacements, ld, tarball)
  814. if not newuri or newuri in uris or newuri == origud.url:
  815. continue
  816. if not trusted_network(ld, newuri):
  817. logger.debug("Mirror %s not in the list of trusted networks, skipping" % (newuri))
  818. continue
  819. # Create a local copy of the mirrors minus the current line
  820. # this will prevent us from recursively processing the same line
  821. # as well as indirect recursion A -> B -> C -> A
  822. localmirrors = list(mirrors)
  823. localmirrors.remove(line)
  824. try:
  825. newud = FetchData(newuri, ld)
  826. newud.setup_localpath(ld)
  827. except bb.fetch2.BBFetchException as e:
  828. logger.debug("Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
  829. logger.debug(str(e))
  830. try:
  831. # setup_localpath of file:// urls may fail, we should still see
  832. # if mirrors of the url exist
  833. adduri(newud, uris, uds, localmirrors, tarballs)
  834. except UnboundLocalError:
  835. pass
  836. continue
  837. uris.append(newuri)
  838. uds.append(newud)
  839. adduri(newud, uris, uds, localmirrors, tarballs)
  840. adduri(origud, uris, uds, mirrors, origud.mirrortarballs or [None])
  841. return uris, uds
  842. def rename_bad_checksum(ud, suffix):
  843. """
  844. Rename the downloaded file to carry the given suffix (used to set aside files with bad checksums)
  845. """
  846. if ud.localpath is None:
  847. return
  848. new_localpath = "%s_bad-checksum_%s" % (ud.localpath, suffix)
  849. bb.warn("Renaming %s to %s" % (ud.localpath, new_localpath))
  850. if not bb.utils.movefile(ud.localpath, new_localpath):
  851. bb.warn("Renaming %s to %s failed, grep movefile in log.do_fetch to see why" % (ud.localpath, new_localpath))
  852. def try_mirror_url(fetch, origud, ud, ld, check = False):
  853. # Return of None or a value means we're finished
  854. # False means try another url
  855. if ud.lockfile and ud.lockfile != origud.lockfile:
  856. lf = bb.utils.lockfile(ud.lockfile)
  857. try:
  858. if check:
  859. found = ud.method.checkstatus(fetch, ud, ld)
  860. if found:
  861. return found
  862. return False
  863. if not verify_donestamp(ud, ld, origud) or ud.method.need_update(ud, ld):
  864. ud.method.download(ud, ld)
  865. if hasattr(ud.method,"build_mirror_data"):
  866. ud.method.build_mirror_data(ud, ld)
  867. if not ud.localpath or not os.path.exists(ud.localpath):
  868. return False
  869. if ud.localpath == origud.localpath:
  870. return ud.localpath
  871. # We may be obtaining a mirror tarball which needs further processing by the real fetcher
  872. # If that tarball is a local file:// we need to provide a symlink to it
  873. dldir = ld.getVar("DL_DIR")
  874. if origud.mirrortarballs and os.path.basename(ud.localpath) in origud.mirrortarballs and os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
  875. # Create donestamp in old format to avoid triggering a re-download
  876. if ud.donestamp:
  877. bb.utils.mkdirhier(os.path.dirname(ud.donestamp))
  878. open(ud.donestamp, 'w').close()
  879. dest = os.path.join(dldir, os.path.basename(ud.localpath))
  880. if not os.path.exists(dest):
  881. # In case this is executing without any file locks held (as is
  882. # the case for file:// URLs), two tasks may end up here at the
  883. # same time, in which case we do not want the second task to
  884. # fail when the link has already been created by the first task.
  885. try:
  886. os.symlink(ud.localpath, dest)
  887. except FileExistsError:
  888. pass
  889. if not verify_donestamp(origud, ld) or origud.method.need_update(origud, ld):
  890. origud.method.download(origud, ld)
  891. if hasattr(origud.method, "build_mirror_data"):
  892. origud.method.build_mirror_data(origud, ld)
  893. return origud.localpath
  894. # Otherwise the result is a local file:// and we symlink to it
  895. ensure_symlink(ud.localpath, origud.localpath)
  896. update_stamp(origud, ld)
  897. return ud.localpath
  898. except bb.fetch2.NetworkAccess:
  899. raise
  900. except IOError as e:
  901. if e.errno in [errno.ESTALE]:
  902. logger.warning("Stale Error Observed %s." % ud.url)
  903. return False
  904. raise
  905. except bb.fetch2.BBFetchException as e:
  906. if isinstance(e, ChecksumError):
  907. logger.warning("Mirror checksum failure for url %s (original url: %s)\nCleaning and trying again." % (ud.url, origud.url))
  908. logger.warning(str(e))
  909. if os.path.exists(ud.localpath):
  910. rename_bad_checksum(ud, e.checksum)
  911. elif isinstance(e, NoChecksumError):
  912. raise
  913. else:
  914. logger.debug("Mirror fetch failure for url %s (original url: %s)" % (ud.url, origud.url))
  915. logger.debug(str(e))
  916. try:
  917. ud.method.clean(ud, ld)
  918. except UnboundLocalError:
  919. pass
  920. return False
  921. finally:
  922. if ud.lockfile and ud.lockfile != origud.lockfile:
  923. bb.utils.unlockfile(lf)
  924. def ensure_symlink(target, link_name):
  925. if not os.path.exists(link_name):
  926. dirname = os.path.dirname(link_name)
  927. bb.utils.mkdirhier(dirname)
  928. if os.path.islink(link_name):
  929. # Broken symbolic link
  930. os.unlink(link_name)
  931. # In case this is executing without any file locks held (as is
  932. # the case for file:// URLs), two tasks may end up here at the
  933. # same time, in which case we do not want the second task to
  934. # fail when the link has already been created by the first task.
  935. try:
  936. os.symlink(target, link_name)
  937. except FileExistsError:
  938. pass
  939. def try_mirrors(fetch, d, origud, mirrors, check = False):
  940. """
  941. Try to use a mirrored version of the sources.
  942. This method will be automatically called before the fetchers go.
  943. d is a bb.data instance
  944. origud is the original FetchData for the uri we're trying to download
  945. mirrors is the list of mirrors we're going to try
  946. """
  947. ld = d.createCopy()
  948. uris, uds = build_mirroruris(origud, mirrors, ld)
  949. for index, uri in enumerate(uris):
  950. ret = try_mirror_url(fetch, origud, uds[index], ld, check)
  951. if ret:
  952. return ret
  953. return None
  954. def trusted_network(d, url):
  955. """
  956. Check whether the host of the given url is trusted: returns True if
  957. networking is disabled, if BB_ALLOWED_NETWORKS is not set (globally or
  958. for a specific recipe), or if the host matches one of the allowed networks.
  959. """
  960. if bb.utils.to_boolean(d.getVar("BB_NO_NETWORK")):
  961. return True
  962. pkgname = d.expand(d.getVar('PN', False))
  963. trusted_hosts = None
  964. if pkgname:
  965. trusted_hosts = d.getVarFlag('BB_ALLOWED_NETWORKS', pkgname, False)
  966. if not trusted_hosts:
  967. trusted_hosts = d.getVar('BB_ALLOWED_NETWORKS')
  968. # Not enabled.
  969. if not trusted_hosts:
  970. return True
  971. scheme, network, path, user, passwd, param = decodeurl(url)
  972. if not network:
  973. return True
  974. network = network.split(':')[0]
  975. network = network.lower()
  976. for host in trusted_hosts.split(" "):
  977. host = host.lower()
  978. if host.startswith("*.") and ("." + network).endswith(host[1:]):
  979. return True
  980. if host == network:
  981. return True
  982. return False
  983. def srcrev_internal_helper(ud, d, name):
  984. """
  985. Return:
  986. a) a source revision if specified
  987. b) latest revision if SRCREV="AUTOINC"
  988. c) None if not specified
  989. """
  990. srcrev = None
  991. pn = d.getVar("PN")
  992. attempts = []
  993. if name != '' and pn:
  994. attempts.append("SRCREV_%s:pn-%s" % (name, pn))
  995. if name != '':
  996. attempts.append("SRCREV_%s" % name)
  997. if pn:
  998. attempts.append("SRCREV:pn-%s" % pn)
  999. attempts.append("SRCREV")
  1000. for a in attempts:
  1001. srcrev = d.getVar(a)
  1002. if srcrev and srcrev != "INVALID":
  1003. break
  1004. if 'rev' in ud.parm and 'tag' in ud.parm:
  1005. raise FetchError("Please specify a ;rev= parameter or a ;tag= parameter in the url %s but not both." % (ud.url))
  1006. if 'rev' in ud.parm or 'tag' in ud.parm:
  1007. if 'rev' in ud.parm:
  1008. parmrev = ud.parm['rev']
  1009. else:
  1010. parmrev = ud.parm['tag']
  1011. if srcrev == "INVALID" or not srcrev:
  1012. return parmrev
  1013. if srcrev != parmrev:
  1014. raise FetchError("Conflicting revisions (%s from SRCREV and %s from the url) found, please specify one valid value" % (srcrev, parmrev))
  1015. return parmrev
  1016. if srcrev == "INVALID" or not srcrev:
  1017. raise FetchError("Please set a valid SRCREV for url %s (possible key names are %s, or use a ;rev=X URL parameter)" % (str(attempts), ud.url), ud.url)
  1018. if srcrev == "AUTOINC":
  1019. srcrev = ud.method.latest_revision(ud, d, name)
  1020. return srcrev
  1021. def get_checksum_file_list(d):
  1022. """ Get a list of files checksum in SRC_URI
  1023. Returns the resolved local paths of all local file entries in
  1024. SRC_URI as a space-separated string
  1025. """
  1026. fetch = Fetch([], d, cache = False, localonly = True)
  1027. filelist = []
  1028. for u in fetch.urls:
  1029. ud = fetch.ud[u]
  1030. if ud and isinstance(ud.method, local.Local):
  1031. found = False
  1032. paths = ud.method.localpaths(ud, d)
  1033. for f in paths:
  1034. pth = ud.decodedurl
  1035. if os.path.exists(f):
  1036. found = True
  1037. filelist.append(f + ":" + str(os.path.exists(f)))
  1038. if not found:
  1039. bb.fatal(("Unable to get checksum for %s SRC_URI entry %s: file could not be found"
  1040. "\nThe following paths were searched:"
  1041. "\n%s") % (d.getVar('BPN'), os.path.basename(f), '\n'.join(paths)))
  1042. return " ".join(filelist)
  1043. def get_file_checksums(filelist, pn, localdirsexclude):
  1044. """Get a list of the checksums for a list of local files
  1045. Returns the checksums for a list of local files, caching the results as
  1046. it proceeds
  1047. """
  1048. return _checksum_cache.get_checksums(filelist, pn, localdirsexclude)
  1049. class FetchData(object):
  1050. """
  1051. A class which represents the fetcher state for a given URI.
  1052. """
  1053. def __init__(self, url, d, localonly = False):
  1054. # localpath is the location of a downloaded result. If not set, the file is local.
  1055. self.donestamp = None
  1056. self.needdonestamp = True
  1057. self.localfile = ""
  1058. self.localpath = None
  1059. self.lockfile = None
  1060. self.mirrortarballs = []
  1061. self.basename = None
  1062. self.basepath = None
  1063. (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(d.expand(url))
  1064. self.date = self.getSRCDate(d)
  1065. self.url = url
  1066. if not self.user and "user" in self.parm:
  1067. self.user = self.parm["user"]
  1068. if not self.pswd and "pswd" in self.parm:
  1069. self.pswd = self.parm["pswd"]
  1070. self.setup = False
  1071. def configure_checksum(checksum_id):
  1072. if "name" in self.parm:
  1073. checksum_name = "%s.%ssum" % (self.parm["name"], checksum_id)
  1074. else:
  1075. checksum_name = "%ssum" % checksum_id
  1076. setattr(self, "%s_name" % checksum_id, checksum_name)
  1077. if checksum_name in self.parm:
  1078. checksum_expected = self.parm[checksum_name]
  1079. elif self.type not in ["http", "https", "ftp", "ftps", "sftp", "s3", "az"]:
  1080. checksum_expected = None
  1081. else:
  1082. checksum_expected = d.getVarFlag("SRC_URI", checksum_name)
  1083. setattr(self, "%s_expected" % checksum_id, checksum_expected)
  1084. for checksum_id in CHECKSUM_LIST:
  1085. configure_checksum(checksum_id)
  1086. self.ignore_checksums = False
  1087. self.names = self.parm.get("name",'default').split(',')
  1088. self.method = None
  1089. for m in methods:
  1090. if m.supports(self, d):
  1091. self.method = m
  1092. break
  1093. if not self.method:
  1094. raise NoMethodError(url)
  1095. if localonly and not isinstance(self.method, local.Local):
  1096. raise NonLocalMethod()
  1097. if self.parm.get("proto", None) and "protocol" not in self.parm:
  1098. logger.warning('Consider updating %s recipe to use "protocol" not "proto" in SRC_URI.', d.getVar('PN'))
  1099. self.parm["protocol"] = self.parm.get("proto", None)
  1100. if hasattr(self.method, "urldata_init"):
  1101. self.method.urldata_init(self, d)
  1102. if "localpath" in self.parm:
  1103. # if user sets localpath for file, use it instead.
  1104. self.localpath = self.parm["localpath"]
  1105. self.basename = os.path.basename(self.localpath)
  1106. elif self.localfile:
  1107. self.localpath = self.method.localpath(self, d)
  1108. dldir = d.getVar("DL_DIR")
  1109. if not self.needdonestamp:
  1110. return
  1111. # Note: .done and .lock files should always be in DL_DIR whereas localpath may not be.
  1112. if self.localpath and self.localpath.startswith(dldir):
  1113. basepath = self.localpath
  1114. elif self.localpath:
  1115. basepath = dldir + os.sep + os.path.basename(self.localpath)
  1116. elif self.basepath or self.basename:
  1117. basepath = dldir + os.sep + (self.basepath or self.basename)
  1118. else:
  1119. bb.fatal("Can't determine lock path for url %s" % url)
  1120. self.donestamp = basepath + '.done'
  1121. self.lockfile = basepath + '.lock'
  1122. def setup_revisions(self, d):
  1123. self.revisions = {}
  1124. for name in self.names:
  1125. self.revisions[name] = srcrev_internal_helper(self, d, name)
  1126. # Add compatibility code for the case where no name is specified
  1127. if len(self.names) == 1:
  1128. self.revision = self.revisions[self.names[0]]
  1129. def setup_localpath(self, d):
  1130. if not self.localpath:
  1131. self.localpath = self.method.localpath(self, d)
  1132. def getSRCDate(self, d):
  1133. """
  1134. Return the SRC Date for the component
  1135. d is the data store
  1136. """
  1137. if "srcdate" in self.parm:
  1138. return self.parm['srcdate']
  1139. pn = d.getVar("PN")
  1140. if pn:
  1141. return d.getVar("SRCDATE_%s" % pn) or d.getVar("SRCDATE") or d.getVar("DATE")
  1142. return d.getVar("SRCDATE") or d.getVar("DATE")
  1143. class FetchMethod(object):
  1144. """Base class for 'fetch'ing data"""
  1145. def __init__(self, urls=None):
  1146. self.urls = []
  1147. def supports(self, urldata, d):
  1148. """
  1149. Check to see if this fetch class supports a given url.
  1150. """
  1151. return 0
  1152. def localpath(self, urldata, d):
  1153. """
  1154. Return the local filename of a given url assuming a successful fetch.
  1155. Can also set up variables in urldata for use in download() (saving code duplication
  1156. and duplicate code execution)
  1157. """
  1158. return os.path.join(d.getVar("DL_DIR"), urldata.localfile)
  1159. def supports_checksum(self, urldata):
  1160. """
  1161. Is localpath something that can be represented by a checksum?
  1162. """
  1163. # We cannot compute checksums for directories
  1164. if os.path.isdir(urldata.localpath):
  1165. return False
  1166. return True
  1167. def recommends_checksum(self, urldata):
  1168. """
  1169. Is the backend one where checksumming is recommended (should warnings
  1170. be displayed if there is no checksum)?
  1171. """
  1172. return False
  1173. def verify_donestamp(self, ud, d):
  1174. """
  1175. Verify the donestamp file
  1176. """
  1177. return verify_donestamp(ud, d)
  1178. def update_donestamp(self, ud, d):
  1179. """
  1180. Update the donestamp file
  1181. """
  1182. update_stamp(ud, d)
  1183. def _strip_leading_slashes(self, relpath):
  1184. """
  1185. Remove leading slash as os.path.join can't cope
  1186. """
  1187. while os.path.isabs(relpath):
  1188. relpath = relpath[1:]
  1189. return relpath
    def setUrls(self, urls):
        self.__urls = urls

    def getUrls(self):
        return self.__urls

    urls = property(getUrls, setUrls, None, "Urls property")

    def need_update(self, ud, d):
        """
        Force a fetch, even if localpath exists?
        """
        if os.path.exists(ud.localpath):
            return False
        return True

    def supports_srcrev(self):
        """
        The fetcher supports auto source revisions (SRCREV)
        """
        return False

    def download(self, urldata, d):
        """
        Fetch urls
        Assumes localpath was called first
        """
        raise NoMethodError(urldata.url)
    def unpack(self, urldata, rootdir, data):
        iterate = False
        file = urldata.localpath

        try:
            unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
        except ValueError as exc:
            bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
                     (file, urldata.parm.get('unpack')))

        base, ext = os.path.splitext(file)
        if ext in ['.gz', '.bz2', '.Z', '.xz', '.lz']:
            efile = os.path.join(rootdir, os.path.basename(base))
        else:
            efile = file
        cmd = None

        if unpack:
            tar_cmd = 'tar --extract --no-same-owner'
            if 'striplevel' in urldata.parm:
                tar_cmd += ' --strip-components=%s' % urldata.parm['striplevel']
            if file.endswith('.tar'):
                cmd = '%s -f %s' % (tar_cmd, file)
            elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
                cmd = '%s -z -f %s' % (tar_cmd, file)
            elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
                cmd = 'bzip2 -dc %s | %s -f -' % (file, tar_cmd)
            elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
                cmd = 'gzip -dc %s > %s' % (file, efile)
            elif file.endswith('.bz2'):
                cmd = 'bzip2 -dc %s > %s' % (file, efile)
            elif file.endswith('.txz') or file.endswith('.tar.xz'):
                cmd = 'xz -dc %s | %s -f -' % (file, tar_cmd)
            elif file.endswith('.xz'):
                cmd = 'xz -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.lz'):
                cmd = 'lzip -dc %s | %s -f -' % (file, tar_cmd)
            elif file.endswith('.lz'):
                cmd = 'lzip -dc %s > %s' % (file, efile)
            elif file.endswith('.tar.7z'):
                cmd = '7z x -so %s | %s -f -' % (file, tar_cmd)
            elif file.endswith('.7z'):
                cmd = '7za x -y %s 1>/dev/null' % file
            elif file.endswith('.tzst') or file.endswith('.tar.zst'):
                cmd = 'zstd --decompress --stdout %s | %s -f -' % (file, tar_cmd)
            elif file.endswith('.zst'):
                cmd = 'zstd --decompress --stdout %s > %s' % (file, efile)
            elif file.endswith('.zip') or file.endswith('.jar'):
                try:
                    dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
                except ValueError as exc:
                    bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
                             (file, urldata.parm.get('dos')))
                cmd = 'unzip -q -o'
                if dos:
                    cmd = '%s -a' % cmd
                cmd = "%s '%s'" % (cmd, file)
            elif file.endswith('.rpm') or file.endswith('.srpm'):
                if 'extract' in urldata.parm:
                    unpack_file = urldata.parm.get('extract')
                    cmd = 'rpm2cpio.sh %s | cpio -id %s' % (file, unpack_file)
                    iterate = True
                    iterate_file = unpack_file
                else:
                    cmd = 'rpm2cpio.sh %s | cpio -id' % (file)
            elif file.endswith('.deb') or file.endswith('.ipk'):
                output = subprocess.check_output(['ar', '-t', file], preexec_fn=subprocess_setup)
                datafile = None
                if output:
                    for line in output.decode().splitlines():
                        if line.startswith('data.tar.'):
                            datafile = line
                            break
                    else:
                        raise UnpackError("Unable to unpack deb/ipk package - does not contain data.tar.* file", urldata.url)
                else:
                    raise UnpackError("Unable to unpack deb/ipk package - could not list contents", urldata.url)
                cmd = 'ar x %s %s && %s -p -f %s && rm %s' % (file, datafile, tar_cmd, datafile, datafile)

        # If 'subdir' param exists, create a dir and use it as destination for unpack cmd
        if 'subdir' in urldata.parm:
            subdir = urldata.parm.get('subdir')
            if os.path.isabs(subdir):
                if not os.path.realpath(subdir).startswith(os.path.realpath(rootdir)):
                    raise UnpackError("subdir argument isn't a subdirectory of unpack root %s" % rootdir, urldata.url)
                unpackdir = subdir
            else:
                unpackdir = os.path.join(rootdir, subdir)
            bb.utils.mkdirhier(unpackdir)
        else:
            unpackdir = rootdir

        if not unpack or not cmd:
            # If file == dest, then avoid any copies, as we already put the file into dest!
            dest = os.path.join(unpackdir, os.path.basename(file))
            if file != dest and not (os.path.exists(dest) and os.path.samefile(file, dest)):
                destdir = '.'
                # For file:// entries all intermediate dirs in path must be created at destination
                if urldata.type == "file":
                    # A trailing '/' would cause the copy to land in the wrong place
                    urlpath = urldata.path.rstrip('/')
                    # We want files placed relative to cwd, so strip any leading '/'
                    urlpath = urlpath.lstrip('/')
                    if urlpath.find("/") != -1:
                        destdir = urlpath.rsplit("/", 1)[0] + '/'
                        bb.utils.mkdirhier("%s/%s" % (unpackdir, destdir))
                cmd = 'cp -fpPRH "%s" "%s"' % (file, destdir)

        if not cmd:
            return

        path = data.getVar('PATH')
        if path:
            cmd = "PATH=\"%s\" %s" % (path, cmd)
        bb.note("Unpacking %s to %s/" % (file, unpackdir))
        ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True, cwd=unpackdir)

        if ret != 0:
            raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)

        if iterate is True:
            iterate_urldata = urldata
            iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
            self.unpack(urldata, rootdir, data)

        return
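    # Illustrative sketch (assumption, for documentation only): for a
    # hypothetical download "foo-1.0.tar.xz" fetched with striplevel=1, the
    # command built above would be roughly
    #
    #   xz -dc foo-1.0.tar.xz | tar --extract --no-same-owner --strip-components=1 -f -
    #
    # executed with cwd set to the unpack directory.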
    def clean(self, urldata, d):
        """
        Clean any existing full or partial download
        """
        bb.utils.remove(urldata.localpath)

    def try_premirror(self, urldata, d):
        """
        Should premirrors be used?
        """
        return True

    def try_mirrors(self, fetch, urldata, d, mirrors, check=False):
        """
        Try to use a mirror
        """
        return bool(try_mirrors(fetch, d, urldata, mirrors, check))

    def checkstatus(self, fetch, urldata, d):
        """
        Check the status of a URL
        Assumes localpath was called first
        """
        logger.info("URL %s could not be checked for status since no method exists.", urldata.url)
        return True

    def latest_revision(self, ud, d, name):
        """
        Look in the cache for the latest revision, if not present ask the SCM.
        """
        if not hasattr(self, "_latest_revision"):
            raise ParameterError("The fetcher for this URL does not support _latest_revision", ud.url)

        revs = bb.persist_data.persist('BB_URI_HEADREVS', d)
        key = self.generate_revision_key(ud, d, name)
        try:
            return revs[key]
        except KeyError:
            revs[key] = rev = self._latest_revision(ud, d, name)
            return rev

    def sortable_revision(self, ud, d, name):
        latest_rev = self._build_revision(ud, d, name)
        return True, str(latest_rev)

    def generate_revision_key(self, ud, d, name):
        return self._revision_key(ud, d, name)

    def latest_versionstring(self, ud, d):
        """
        Compute the latest release name like "x.y.z" in "x.y.z+gitHASH"
        by searching through the tags output of ls-remote, comparing
        versions and returning the highest match as a (version, revision) pair.
        """
        return ('', '')

    def done(self, ud, d):
        """
        Is the download done?
        """
        if os.path.exists(ud.localpath):
            return True
        return False

    def implicit_urldata(self, ud, d):
        """
        Get a list of FetchData objects for any implicit URLs that will also
        be downloaded when we fetch the given URL.
        """
        return []
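
# Illustrative sketch (assumption, not part of the upstream code): a minimal
# FetchMethod subclass only needs supports(), urldata_init() and download().
# The "example" scheme and the example-cli command below are hypothetical.
#
#   class Example(FetchMethod):
#       def supports(self, ud, d):
#           return ud.type in ["example"]
#
#       def urldata_init(self, ud, d):
#           ud.localfile = os.path.basename(ud.path)
#
#       def download(self, ud, d):
#           runfetchcmd("example-cli get %s -o %s" % (ud.url, ud.localpath), d)
#
# New methods become visible to the fetcher by appending an instance to the
# module-level 'methods' list (see the registrations at the end of this file).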

class Fetch(object):
    def __init__(self, urls, d, cache=True, localonly=False, connection_cache=None):
        if localonly and cache:
            raise Exception("bb.fetch2.Fetch.__init__: cannot set cache and localonly at same time")

        if not urls:
            urls = d.getVar("SRC_URI").split()
        self.urls = urls
        self.d = d
        self.ud = {}
        self.connection_cache = connection_cache

        fn = d.getVar('FILE')
        mc = d.getVar('__BBMULTICONFIG') or ""
        key = None
        if cache and fn:
            key = mc + fn + str(id(d))
            if key in urldata_cache:
                self.ud = urldata_cache[key]

        for url in urls:
            if url not in self.ud:
                try:
                    self.ud[url] = FetchData(url, d, localonly)
                except NonLocalMethod:
                    if localonly:
                        self.ud[url] = None
                        pass

        if key:
            urldata_cache[key] = self.ud

    def localpath(self, url):
        if url not in self.urls:
            self.ud[url] = FetchData(url, self.d)

        self.ud[url].setup_localpath(self.d)
        return self.d.expand(self.ud[url].localpath)

    def localpaths(self):
        """
        Return a list of the local filenames, assuming successful fetch
        """
        local = []

        for u in self.urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            local.append(ud.localpath)

        return local
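    # Illustrative sketch (assumption, not upstream code): typical use from
    # task code, where 'd' is the recipe datastore and WORKDIR is where the
    # sources are unpacked.
    #
    #   fetcher = bb.fetch2.Fetch(None, d)      # None -> use SRC_URI from d
    #   fetcher.download()                      # populate DL_DIR
    #   paths = fetcher.localpaths()            # downloaded files in DL_DIR
    #   fetcher.unpack(d.getVar('WORKDIR'))     # extract into the work directory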
    def download(self, urls=None):
        """
        Fetch all urls
        """
        if not urls:
            urls = self.urls

        network = self.d.getVar("BB_NO_NETWORK")
        premirroronly = bb.utils.to_boolean(self.d.getVar("BB_FETCH_PREMIRRORONLY"))

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            done = False

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            try:
                self.d.setVar("BB_NO_NETWORK", network)

                if m.verify_donestamp(ud, self.d) and not m.need_update(ud, self.d):
                    done = True
                elif m.try_premirror(ud, self.d):
                    logger.debug("Trying PREMIRRORS")
                    mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
                    done = m.try_mirrors(self, ud, self.d, mirrors)
                    if done:
                        try:
                            # early checksum verification so that if the checksum of the premirror
                            # contents mismatch the fetcher can still try upstream and mirrors
                            m.update_donestamp(ud, self.d)
                        except ChecksumError as e:
                            logger.warning("Checksum failure encountered with premirror download of %s - will attempt other sources." % u)
                            logger.debug(str(e))
                            done = False

                if premirroronly:
                    self.d.setVar("BB_NO_NETWORK", "1")

                firsterr = None
                verified_stamp = False
                if done:
                    verified_stamp = m.verify_donestamp(ud, self.d)
                if not done and (not verified_stamp or m.need_update(ud, self.d)):
                    try:
                        if not trusted_network(self.d, ud.url):
                            raise UntrustedUrl(ud.url)
                        logger.debug("Trying Upstream")
                        m.download(ud, self.d)
                        if hasattr(m, "build_mirror_data"):
                            m.build_mirror_data(ud, self.d)
                        done = True
                        # early checksum verification, so that if the checksum mismatches,
                        # the fetcher still has a chance to fetch from a mirror
                        m.update_donestamp(ud, self.d)

                    except bb.fetch2.NetworkAccess:
                        raise

                    except BBFetchException as e:
                        if isinstance(e, ChecksumError):
                            logger.warning("Checksum failure encountered with download of %s - will attempt other sources if available" % u)
                            logger.debug(str(e))
                            if os.path.exists(ud.localpath):
                                rename_bad_checksum(ud, e.checksum)
                        elif isinstance(e, NoChecksumError):
                            raise
                        else:
                            logger.warning('Failed to fetch URL %s, attempting MIRRORS if available' % u)
                            logger.debug(str(e))
                        firsterr = e
                        # Remove any incomplete fetch
                        if not verified_stamp:
                            m.clean(ud, self.d)
                        logger.debug("Trying MIRRORS")
                        mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                        done = m.try_mirrors(self, ud, self.d, mirrors)

                if not done or not m.done(ud, self.d):
                    if firsterr:
                        logger.error(str(firsterr))
                    raise FetchError("Unable to fetch URL from any source.", u)

                m.update_donestamp(ud, self.d)

            except IOError as e:
                if e.errno in [errno.ESTALE]:
                    logger.error("Stale Error Observed %s." % u)
                    raise ChecksumError("Stale Error Detected")

            except BBFetchException as e:
                if isinstance(e, ChecksumError):
                    logger.error("Checksum failure fetching %s" % u)
                raise

            finally:
                if ud.lockfile:
                    bb.utils.unlockfile(lf)
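    # Illustrative sketch (assumption, for documentation only): the order of
    # sources tried per URL in download() is
    #
    #   1. an existing download (donestamp verified, no update needed)
    #   2. PREMIRRORS
    #   3. the upstream URL itself (subject to trusted_network())
    #   4. MIRRORS
    #
    # with BB_FETCH_PREMIRRORONLY forcing BB_NO_NETWORK="1" before step 3.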
    def checkstatus(self, urls=None):
        """
        Check all URLs exist upstream.

        Returns None if the URLs exist. Raises FetchError if a check was
        unsuccessful without hitting a lower-level error (e.g. the file was
        not found), and lets other exceptions propagate in genuine error cases.
        """

        if not urls:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)
            m = ud.method
            logger.debug("Testing URL %s", u)
            # First try checking uri, u, from PREMIRRORS
            mirrors = mirror_from_string(self.d.getVar('PREMIRRORS'))
            ret = m.try_mirrors(self, ud, self.d, mirrors, True)
            if not ret:
                # Next try checking from the original uri, u
                ret = m.checkstatus(self, ud, self.d)
                if not ret:
                    # Finally, try checking uri, u, from MIRRORS
                    mirrors = mirror_from_string(self.d.getVar('MIRRORS'))
                    ret = m.try_mirrors(self, ud, self.d, mirrors, True)

            if not ret:
                raise FetchError("URL %s doesn't work" % u, u)
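    # Illustrative sketch (assumption, not upstream code): checking URL
    # availability without downloading.  The URL and the handling shown are
    # assumptions for illustration only.
    #
    #   fetcher = bb.fetch2.Fetch(["https://example.com/foo-1.0.tar.gz"], d)
    #   try:
    #       fetcher.checkstatus()               # returns None on success
    #   except bb.fetch2.FetchError as e:
    #       bb.warn("URL unreachable: %s" % e)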
    def unpack(self, root, urls=None):
        """
        Unpack urls to root
        """
        if not urls:
            urls = self.urls

        for u in urls:
            ud = self.ud[u]
            ud.setup_localpath(self.d)

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.unpack(ud, root, self.d)

            if ud.lockfile:
                bb.utils.unlockfile(lf)

    def clean(self, urls=None):
        """
        Clean files that the fetcher gets or places
        """
        if not urls:
            urls = self.urls

        for url in urls:
            if url not in self.ud:
                self.ud[url] = FetchData(url, self.d)
            ud = self.ud[url]
            ud.setup_localpath(self.d)

            if not ud.localfile and ud.localpath is None:
                continue

            if ud.lockfile:
                lf = bb.utils.lockfile(ud.lockfile)

            ud.method.clean(ud, self.d)
            if ud.donestamp:
                bb.utils.remove(ud.donestamp)

            if ud.lockfile:
                bb.utils.unlockfile(lf)

    def expanded_urldata(self, urls=None):
        """
        Get an expanded list of FetchData objects covering both the given
        URLS and any additional implicit URLs that are added automatically by
        the appropriate FetchMethod.
        """
        if not urls:
            urls = self.urls

        urldata = []
        for url in urls:
            ud = self.ud[url]
            urldata.append(ud)
            urldata += ud.method.implicit_urldata(ud, self.d)

        return urldata
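    # Illustrative sketch (assumption, not upstream code): expanded_urldata()
    # is useful when a fetcher pulls in extra sources implicitly (for example,
    # submodules handled by the gitsm fetcher).
    #
    #   for ud in fetcher.expanded_urldata():
    #       print(ud.url, ud.localpath)
    #
    # The printing is purely for illustration; callers normally inspect the
    # FetchData objects programmatically.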

class FetchConnectionCache(object):
    """
    A container for socket connections.
    """
    def __init__(self):
        self.cache = {}

    def get_connection_name(self, host, port):
        return host + ':' + str(port)

    def add_connection(self, host, port, connection):
        cn = self.get_connection_name(host, port)
        if cn not in self.cache:
            self.cache[cn] = connection

    def get_connection(self, host, port):
        connection = None
        cn = self.get_connection_name(host, port)
        if cn in self.cache:
            connection = self.cache[cn]

        return connection

    def remove_connection(self, host, port):
        cn = self.get_connection_name(host, port)
        if cn in self.cache:
            self.cache[cn].close()
            del self.cache[cn]

    def close_connections(self):
        for cn in list(self.cache.keys()):
            self.cache[cn].close()
            del self.cache[cn]
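
# Illustrative sketch (assumption, not upstream code): a connection cache can
# be shared across a batch of downloads so fetchers that support it can reuse
# persistent connections keyed by "host:port".  The 'urls' and 'd' values are
# assumed to come from the caller.
#
#   cache = bb.fetch2.FetchConnectionCache()
#   fetcher = bb.fetch2.Fetch(urls, d, connection_cache=cache)
#   fetcher.download()
#   cache.close_connections()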

from . import cvs
from . import git
from . import gitsm
from . import gitannex
from . import local
from . import svn
from . import wget
from . import ssh
from . import sftp
from . import s3
from . import perforce
from . import bzr
from . import hg
from . import osc
from . import repo
from . import clearcase
from . import npm
from . import npmsw
from . import az
from . import crate

methods.append(local.Local())
methods.append(wget.Wget())
methods.append(svn.Svn())
methods.append(git.Git())
methods.append(gitsm.GitSM())
methods.append(gitannex.GitANNEX())
methods.append(cvs.Cvs())
methods.append(ssh.SSH())
methods.append(sftp.SFTP())
methods.append(s3.S3())
methods.append(perforce.Perforce())
methods.append(bzr.Bzr())
methods.append(hg.Hg())
methods.append(osc.Osc())
methods.append(repo.Repo())
methods.append(clearcase.ClearCase())
methods.append(npm.Npm())
methods.append(npmsw.NpmShrinkWrap())
methods.append(az.Az())
methods.append(crate.Crate())
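
# Illustrative sketch (assumption, for documentation only): URL-to-fetcher
# dispatch walks this 'methods' list and picks the first entry whose supports()
# returns true for the parsed URL, so an additional fetcher only needs to be
# imported and appended here to become usable.  The module name below is
# hypothetical.
#
#   from . import example          # hypothetical module providing Example
#   methods.append(example.Example())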