  1. """
  2. BitBake 'Fetch' implementations
  3. Classes for obtaining upstream sources for the
  4. BitBake build tools.
  5. """
  6. # Copyright (C) 2003, 2004 Chris Larson
  7. #
  8. # SPDX-License-Identifier: GPL-2.0-only
  9. #
  10. # Based on functions from the base bb module, Copyright 2003 Holger Schurig
  11. import shlex
  12. import re
  13. import tempfile
  14. import os
  15. import errno
  16. import bb
  17. import bb.progress
  18. import socket
  19. import http.client
  20. import urllib.request, urllib.parse, urllib.error
  21. from bb.fetch2 import FetchMethod
  22. from bb.fetch2 import FetchError
  23. from bb.fetch2 import logger
  24. from bb.fetch2 import runfetchcmd
  25. from bs4 import BeautifulSoup
  26. from bs4 import SoupStrainer
class WgetProgressHandler(bb.progress.LineFilterProgressHandler):
    """
    Extract progress information from wget output.
    Note: relies on --progress=dot (with -v or without -q/-nv) being
    specified on the wget command line.
    """
    def __init__(self, d):
        super(WgetProgressHandler, self).__init__(d)
        # Send an initial progress event so the bar gets shown
        self._fire_progress(0)
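
    # wget --progress=dot prints lines such as:
    #   3000K .......... .......... .......... 47% 1.20M 2s
    # The exact layout varies between wget versions, but writeline() only
    # needs the trailing "<percent>% <rate>" pair.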
    def writeline(self, line):
        percs = re.findall(r'(\d+)%\s+([\d.]+[A-Z])', line)
        if percs:
            progress = int(percs[-1][0])
            rate = percs[-1][1] + '/s'
            self.update(progress, rate)
            return False
        return True

class Wget(FetchMethod):
    """Class to fetch urls via 'wget'"""

    def check_certs(self, d):
        """
        Should certificates be checked?
        """
        return (d.getVar("BB_CHECK_SSL_CERTS") or "1") != "0"

    def supports(self, ud, d):
        """
        Check to see if a given url can be fetched with wget.
        """
        return ud.type in ['http', 'https', 'ftp', 'ftps']

    def recommends_checksum(self, urldata):
        return True

    def urldata_init(self, ud, d):
        if 'protocol' in ud.parm:
            if ud.parm['protocol'] == 'git':
                raise bb.fetch2.ParameterError("Invalid protocol - if you wish to fetch from a git repository using http, you need to instead use the git:// prefix with protocol=http", ud.url)

        if 'downloadfilename' in ud.parm:
            ud.basename = ud.parm['downloadfilename']
        else:
            ud.basename = os.path.basename(ud.path)

        ud.localfile = d.expand(urllib.parse.unquote(ud.basename))
        if not ud.localfile:
            ud.localfile = d.expand(urllib.parse.unquote(ud.host + ud.path).replace("/", "."))

        self.basecmd = d.getVar("FETCHCMD_wget") or "/usr/bin/env wget -t 2 -T 100"

        if ud.type == 'ftp' or ud.type == 'ftps':
            self.basecmd += " --passive-ftp"

        if not self.check_certs(d):
            self.basecmd += " --no-check-certificate"
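
    # Note: "--progress=dot -v" is appended below so that WgetProgressHandler
    # can parse percentage/rate information from wget's output.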
    def _runwget(self, ud, d, command, quiet, workdir=None):
        progresshandler = WgetProgressHandler(d)
        logger.debug2("Fetching %s using command '%s'" % (ud.url, command))
        bb.fetch2.check_network_access(d, command, ud.url)
        runfetchcmd(command + ' --progress=dot -v', d, quiet, log=progresshandler, workdir=workdir)

    def download(self, ud, d):
        """Fetch urls"""

        fetchcmd = self.basecmd

        dldir = os.path.realpath(d.getVar("DL_DIR"))
        localpath = os.path.join(dldir, ud.localfile) + ".tmp"
        bb.utils.mkdirhier(os.path.dirname(localpath))
        fetchcmd += " -O %s" % shlex.quote(localpath)

        if ud.user and ud.pswd:
            fetchcmd += " --auth-no-challenge"
            if ud.parm.get("redirectauth", "1") == "1":
                # An undocumented feature of wget is that if the
                # username/password are specified on the URI, wget will only
                # send the Authorization header to the first host and not to
                # any hosts that it is redirected to. With the increasing
                # usage of temporary AWS URLs, this difference now matters as
                # AWS will reject any request that has authentication both in
                # the query parameters (from the redirect) and in the
                # Authorization header.
                fetchcmd += " --user=%s --password=%s" % (ud.user, ud.pswd)

        uri = ud.url.split(";")[0]
        if os.path.exists(ud.localpath):
            # file exists, but we didn't complete it.. trying again..
            fetchcmd += " -c -P " + dldir + " '" + uri + "'"
        else:
            fetchcmd += " -P " + dldir + " '" + uri + "'"

        self._runwget(ud, d, fetchcmd, False)

        # Sanity check since wget can pretend it succeeded when it didn't
        # Also, this used to happen if sourceforge sent us to the mirror page
        if not os.path.exists(localpath):
            raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, localpath), uri)

        if os.path.getsize(localpath) == 0:
            os.remove(localpath)
            raise FetchError("The fetch of %s resulted in a zero size file?! Deleting and failing since this isn't right." % (uri), uri)

        # Try and verify any checksum now, meaning if it isn't correct, we don't remove the
        # original file, which might be a race (imagine two recipes referencing the same
        # source, one with an incorrect checksum)
        bb.fetch2.verify_checksum(ud, d, localpath=localpath, fatal_nochecksum=False)

        # Remove the ".tmp" and move the file into position atomically
        # Our lock prevents multiple writers but mirroring code may grab incomplete files
        os.rename(localpath, localpath[:-4])

        return True
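
    # checkstatus() tests whether a URI exists without downloading it, using a
    # HEAD request (with a GET fallback, see HTTPMethodFallback below) and
    # reusing sockets via fetch.connection_cache when one is available.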
    def checkstatus(self, fetch, ud, d, try_again=True):
        class HTTPConnectionCache(http.client.HTTPConnection):
            if fetch.connection_cache:
                def connect(self):
                    """Connect to the host and port specified in __init__."""

                    sock = fetch.connection_cache.get_connection(self.host, self.port)
                    if sock:
                        self.sock = sock
                    else:
                        self.sock = socket.create_connection((self.host, self.port),
                                    self.timeout, self.source_address)
                        fetch.connection_cache.add_connection(self.host, self.port, self.sock)

                    if self._tunnel_host:
                        self._tunnel()
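
        # When no connection cache is in use, the connect() override above is
        # never defined and the stock http.client connect() applies.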

        class CacheHTTPHandler(urllib.request.HTTPHandler):
            def http_open(self, req):
                return self.do_open(HTTPConnectionCache, req)

            def do_open(self, http_class, req):
                """Return an addinfourl object for the request, using http_class.

                http_class must implement the HTTPConnection API from httplib.
                The addinfourl return value is a file-like object. It also
                has methods and attributes including:
                    - info(): return a mimetools.Message object for the headers
                    - geturl(): return the original request URL
                    - code: HTTP status code
                """
                host = req.host
                if not host:
                    raise urllib.error.URLError('no host given')

                h = http_class(host, timeout=req.timeout) # will parse host:port
                h.set_debuglevel(self._debuglevel)

                headers = dict(req.unredirected_hdrs)
                headers.update(dict((k, v) for k, v in list(req.headers.items())
                            if k not in headers))

                # We want to make an HTTP/1.1 request, but the addinfourl
                # class isn't prepared to deal with a persistent connection.
                # It will try to read all remaining data from the socket,
                # which will block while the server waits for the next request.
                # So make sure the connection gets closed after the (only)
                # request.

                # Don't close connection when connection_cache is enabled,
                if fetch.connection_cache is None:
                    headers["Connection"] = "close"
                else:
                    headers["Connection"] = "Keep-Alive" # Works for HTTP/1.0

                headers = dict(
                        (name.title(), val) for name, val in list(headers.items()))

                if req._tunnel_host:
                    tunnel_headers = {}
                    proxy_auth_hdr = "Proxy-Authorization"
                    if proxy_auth_hdr in headers:
                        tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                        # Proxy-Authorization should not be sent to origin
                        # server.
                        del headers[proxy_auth_hdr]
                    h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

                try:
                    h.request(req.get_method(), req.selector, req.data, headers)
                except socket.error as err: # XXX what error?
                    # Don't close connection when cache is enabled.
                    # Instead, try to detect connections that are no longer
                    # usable (for example, closed unexpectedly) and remove
                    # them from the cache.
                    if fetch.connection_cache is None:
                        h.close()
                    elif isinstance(err, OSError) and err.errno == errno.EBADF:
                        # This happens when the server closes the connection despite the Keep-Alive.
                        # Apparently urllib then uses the file descriptor, expecting it to be
                        # connected, when in reality the connection is already gone.
                        # We let the request fail and expect it to be
                        # tried once more ("try_again" in check_status()),
                        # with the dead connection removed from the cache.
                        # If it still fails, we give up, which can happen for bad
                        # HTTP proxy settings.
                        fetch.connection_cache.remove_connection(h.host, h.port)
                    raise urllib.error.URLError(err)
                else:
                    try:
                        r = h.getresponse()
                    except TimeoutError as e:
                        if fetch.connection_cache:
                            fetch.connection_cache.remove_connection(h.host, h.port)
                        raise TimeoutError(e)

                # Pick apart the HTTPResponse object to get the addinfourl
                # object initialized properly.

                # Wrap the HTTPResponse object in socket's file object adapter
                # for Windows. That adapter calls recv(), so delegate recv()
                # to read(). This weird wrapping allows the returned object to
                # have readline() and readlines() methods.

                # XXX It might be better to extract the read buffering code
                # out of socket._fileobject() and into a base class.
                r.recv = r.read

                # no data, just have to read
                r.read()

                class fp_dummy(object):
                    def read(self):
                        return ""
                    def readline(self):
                        return ""
                    def close(self):
                        pass
                    closed = False

                resp = urllib.response.addinfourl(fp_dummy(), r.msg, req.get_full_url())
                resp.code = r.status
                resp.msg = r.reason

                # Close the connection when the server requests it.
                if fetch.connection_cache is not None:
                    if 'Connection' in r.msg and r.msg['Connection'] == 'close':
                        fetch.connection_cache.remove_connection(h.host, h.port)

                return resp

        class HTTPMethodFallback(urllib.request.BaseHandler):
            """
            Fallback to GET if HEAD is not allowed (405 HTTP error)
            """
            def http_error_405(self, req, fp, code, msg, headers):
                fp.read()
                fp.close()

                if req.get_method() != 'GET':
                    newheaders = dict((k, v) for k, v in list(req.headers.items())
                                      if k.lower() not in ("content-length", "content-type"))
                    return self.parent.open(urllib.request.Request(req.get_full_url(),
                                                        headers=newheaders,
                                                        origin_req_host=req.origin_req_host,
                                                        unverifiable=True))

                raise urllib.request.HTTPError(req, code, msg, headers, None)

            # Some servers (e.g. GitHub archives, hosted on Amazon S3) return 403
            # Forbidden when they actually mean 405 Method Not Allowed.
            http_error_403 = http_error_405

        class FixedHTTPRedirectHandler(urllib.request.HTTPRedirectHandler):
            """
            urllib.request.HTTPRedirectHandler resets the method to GET on
            redirect, when we want to follow redirects using the original method.
            """
            def redirect_request(self, req, fp, code, msg, headers, newurl):
                newreq = urllib.request.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, headers, newurl)
                newreq.get_method = req.get_method
                return newreq
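
        # Preserving the original method across redirects matters here because
        # this function issues HEAD requests; the stock handler would turn the
        # redirected request into a GET.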

        # We need to update the environment here as both the proxy and HTTPS
        # handlers need variables set. The proxy needs http_proxy and friends to
        # be set, and HTTPSHandler ends up calling into openssl to load the
        # certificates. In buildtools configurations this will be looking at the
        # wrong place for certificates by default: we set SSL_CERT_FILE to the
        # right location in the buildtools environment script but as BitBake
        # prunes the environment this is lost. When binaries are executed
        # runfetchcmd ensures these values are in the environment, but this is
        # pure Python so we need to update the environment.
        #
        # Avoid tramping the environment too much by using bb.utils.environment
        # to scope the changes to the build_opener request, which is when the
        # environment lookups happen.
        newenv = bb.fetch2.get_fetcher_environment(d)
        with bb.utils.environment(**newenv):
            import ssl

            if self.check_certs(d):
                context = ssl.create_default_context()
            else:
                context = ssl._create_unverified_context()

            handlers = [FixedHTTPRedirectHandler,
                        HTTPMethodFallback,
                        urllib.request.ProxyHandler(),
                        CacheHTTPHandler(),
                        urllib.request.HTTPSHandler(context=context)]
            opener = urllib.request.build_opener(*handlers)

            try:
                uri_base = ud.url.split(";")[0]
                uri = "{}://{}{}".format(urllib.parse.urlparse(uri_base).scheme, ud.host, ud.path)
                r = urllib.request.Request(uri)
                r.get_method = lambda: "HEAD"
                # Some servers (FusionForge, as used on Alioth) require that the
                # optional Accept header is set.
                r.add_header("Accept", "*/*")
                r.add_header("User-Agent", "bitbake/{}".format(bb.__version__))

                def add_basic_auth(login_str, request):
                    '''Adds Basic auth to http request, pass in login:password as string'''
                    import base64
                    encodeuser = base64.b64encode(login_str.encode('utf-8')).decode("utf-8")
                    authheader = "Basic %s" % encodeuser
                    request.add_header("Authorization", authheader)

                if ud.user and ud.pswd:
                    add_basic_auth(ud.user + ':' + ud.pswd, r)

                try:
                    import netrc
                    auth_data = netrc.netrc().authenticators(urllib.parse.urlparse(uri).hostname)
                    if auth_data:
                        login, _, password = auth_data
                        add_basic_auth("%s:%s" % (login, password), r)
                except (FileNotFoundError, netrc.NetrcParseError):
                    pass

                with opener.open(r, timeout=100) as response:
                    pass
            except (urllib.error.URLError, ConnectionResetError, TimeoutError) as e:
                if try_again:
                    logger.debug2("checkstatus: trying again")
                    return self.checkstatus(fetch, ud, d, False)
                else:
                    # debug for now to avoid spamming the logs in e.g. remote sstate searches
                    logger.debug2("checkstatus() urlopen failed for %s: %s" % (uri, e))
                    return False

        return True

    def _parse_path(self, regex, s):
        """
        Find and group name, version and archive type in the given string s
        """

        m = regex.search(s)
        if m:
            pname = ''
            pver = ''
            ptype = ''

            mdict = m.groupdict()
            if 'name' in mdict.keys():
                pname = mdict['name']
            if 'pver' in mdict.keys():
                pver = mdict['pver']
            if 'type' in mdict.keys():
                ptype = mdict['type']

            bb.debug(3, "_parse_path: %s, %s, %s" % (pname, pver, ptype))

            return (pname, pver, ptype)

        return None
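
    # _modelate_version() rewrites a version string so that pre-release
    # suffixes compare numerically: separators become ".", and "rc", "beta"
    # and "alpha" become ".1000.", ".100." and ".10." respectively; e.g.
    # "v1.2-rc3" becomes "1.2..1000.3".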
    def _modelate_version(self, version):
        if version[0] in ['.', '-']:
            if version[1].isdigit():
                version = version[1] + version[0] + version[2:len(version)]
            else:
                version = version[1:len(version)]

        version = re.sub('-', '.', version)
        version = re.sub('_', '.', version)
        version = re.sub('(rc)+', '.1000.', version)
        version = re.sub('(beta)+', '.100.', version)
        version = re.sub('(alpha)+', '.10.', version)
        if version[0] == 'v':
            version = version[1:len(version)]
        return version

    def _vercmp(self, old, new):
        """
        Check whether 'new' is newer than 'old'. We use the existing vercmp()
        for this. PE is cleared in the comparison as it's not used for the
        build, and PR is cleared too for simplicity, as it's difficult to
        extract from the various upstream formats.
        """

        (oldpn, oldpv, oldsuffix) = old
        (newpn, newpv, newsuffix) = new

        # Check for a new suffix type that we have never heard of before
        if newsuffix:
            m = self.suffix_regex_comp.search(newsuffix)
            if not m:
                bb.warn("%s has a possible unknown suffix: %s" % (newpn, newsuffix))
                return False

        # Not our package so ignore it
        if oldpn != newpn:
            return False

        oldpv = self._modelate_version(oldpv)
        newpv = self._modelate_version(newpv)

        return bb.utils.vercmp(("0", oldpv, ""), ("0", newpv, ""))

    def _fetch_index(self, uri, ud, d):
        """
        Run fetch checkstatus to get directory information
        """
        with tempfile.TemporaryDirectory(prefix="wget-index-") as workdir, tempfile.NamedTemporaryFile(dir=workdir, prefix="wget-listing-") as f:
            fetchcmd = self.basecmd
            fetchcmd += " -O " + f.name + " '" + uri + "'"
            try:
                self._runwget(ud, d, fetchcmd, True, workdir=workdir)
                fetchresult = f.read()
            except bb.fetch2.BBFetchException:
                fetchresult = ""

        return fetchresult

    def _check_latest_version(self, url, package, package_regex, current_version, ud, d):
        """
        Return the latest version of a package inside a given directory path
        If error or no version, return ""
        """
        valid = 0
        version = ['', '', '']

        bb.debug(3, "VersionURL: %s" % (url))
        soup = BeautifulSoup(self._fetch_index(url, ud, d), "html.parser", parse_only=SoupStrainer("a"))
        if not soup:
            bb.debug(3, "*** %s NO SOUP" % (url))
            return ""

        for line in soup.find_all('a', href=True):
            bb.debug(3, "line['href'] = '%s'" % (line['href']))
            bb.debug(3, "line = '%s'" % (str(line)))

            newver = self._parse_path(package_regex, line['href'])
            if not newver:
                newver = self._parse_path(package_regex, str(line))

            if newver:
                bb.debug(3, "Upstream version found: %s" % newver[1])
                if valid == 0:
                    version = newver
                    valid = 1
                elif self._vercmp(version, newver) < 0:
                    version = newver

        pupver = re.sub('_', '.', version[1])

        bb.debug(3, "*** %s -> UpstreamVersion = %s (CurrentVersion = %s)" %
                (package, pupver or "N/A", current_version[1]))

        if valid:
            return pupver

        return ""
    def _check_latest_version_by_dir(self, dirver, package, package_regex, current_version, ud, d):
        """
        Scan every directory in order to get upstream version.
        """
        version_dir = ['', '', '']
        version = ['', '', '']

        dirver_regex = re.compile(r"(?P<pfx>\D*)(?P<ver>(\d+[\.\-_])*(\d+))")
        s = dirver_regex.search(dirver)
        if s:
            version_dir[1] = s.group('ver')
        else:
            version_dir[1] = dirver

        dirs_uri = bb.fetch.encodeurl([ud.type, ud.host,
                ud.path.split(dirver)[0], ud.user, ud.pswd, {}])
        bb.debug(3, "DirURL: %s, %s" % (dirs_uri, package))
        soup = BeautifulSoup(self._fetch_index(dirs_uri, ud, d), "html.parser", parse_only=SoupStrainer("a"))
        if not soup:
            return version[1]

        for line in soup.find_all('a', href=True):
            s = dirver_regex.search(line['href'].strip("/"))
            if s:
                sver = s.group('ver')

                # When the prefix is part of the version directory we need to
                # ensure that only the version directory is used, so strip any
                # preceding directory components.
                #
                # Example: pfx = '/dir1/dir2/v' and version = '2.5'; the
                # expected result is 'v2.5'.
                spfx = s.group('pfx').split('/')[-1]

                version_dir_new = ['', sver, '']
                if self._vercmp(version_dir, version_dir_new) <= 0:
                    dirver_new = spfx + sver
                    path = ud.path.replace(dirver, dirver_new, 1) \
                        .split(package)[0]
                    uri = bb.fetch.encodeurl([ud.type, ud.host, path,
                        ud.user, ud.pswd, {}])

                    pupver = self._check_latest_version(uri,
                            package, package_regex, current_version, ud, d)
                    if pupver:
                        version[1] = pupver

                    version_dir = version_dir_new

        return version[1]

    def _init_regexes(self, package, ud, d):
        """
        Match as many patterns as possible such as:
            gnome-common-2.20.0.tar.gz (most common format)
            gtk+-2.90.1.tar.gz
            xf86-input-synaptics-12.6.9.tar.gz
            dri2proto-2.3.tar.gz
            blktool_4.orig.tar.gz
            libid3tag-0.15.1b.tar.gz
            unzip552.tar.gz
            icu4c-3_6-src.tgz
            genext2fs_1.3.orig.tar.gz
            gst-fluendo-mp3
        """
        # match most patterns, which use "-" as the separator before the version digits
        pn_prefix1 = r"[a-zA-Z][a-zA-Z0-9]*([-_][a-zA-Z]\w+)*\+?[-_]"
        # a loose pattern such as for unzip552.tar.gz
        pn_prefix2 = r"[a-zA-Z]+"
        # a loose pattern such as for 80325-quicky-0.4.tar.gz
        pn_prefix3 = r"[0-9]+[-]?[a-zA-Z]+"
        # Save the Package Name (pn) Regex for use later
        pn_regex = r"(%s|%s|%s)" % (pn_prefix1, pn_prefix2, pn_prefix3)

        # match version
        pver_regex = r"(([A-Z]*\d+[a-zA-Z]*[\.\-_]*)+)"

        # match arch
        parch_regex = "-source|_all_"

        # the src.rpm extension was added only for rpm packages. It can be
        # removed if rpm packages will always be considered as having to be
        # manually upgraded
        psuffix_regex = r"(tar\.\w+|tgz|zip|xz|rpm|bz2|orig\.tar\.\w+|src\.tar\.\w+|src\.tgz|svnr\d+\.tar\.\w+|stable\.tar\.\w+|src\.rpm)"

        # match name, version and archive type of a package
        package_regex_comp = re.compile(r"(?P<name>%s?\.?v?)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s$)"
                                                    % (pn_regex, pver_regex, parch_regex, psuffix_regex))
        self.suffix_regex_comp = re.compile(psuffix_regex)

        # compile regex, can be specific by package or generic regex
        pn_regex = d.getVar('UPSTREAM_CHECK_REGEX')
        if pn_regex:
            package_custom_regex_comp = re.compile(pn_regex)
        else:
            version = self._parse_path(package_regex_comp, package)
            if version:
                package_custom_regex_comp = re.compile(
                    r"(?P<name>%s)(?P<pver>%s)(?P<arch>%s)?[\.-](?P<type>%s)" %
                    (re.escape(version[0]), pver_regex, parch_regex, psuffix_regex))
            else:
                package_custom_regex_comp = None

        return package_custom_regex_comp
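
    # As an example of the regexes built above: "gnome-common-2.20.0.tar.gz"
    # parses as name="gnome-common-", pver="2.20.0", type="tar.gz".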
    def latest_versionstring(self, ud, d):
        """
        Manipulate the URL and try to obtain the latest package version,
        with a sanity check to ensure the same name and type.
        """
        package = ud.path.split("/")[-1]
        current_version = ['', d.getVar('PV'), '']

        # It's possible to have no version in the package name, such as spectrum-fw
        if not re.search(r"\d+", package):
            current_version[1] = re.sub('_', '.', current_version[1])
            current_version[1] = re.sub('-', '.', current_version[1])
            return (current_version[1], '')

        package_regex = self._init_regexes(package, ud, d)
        if package_regex is None:
            bb.warn("latest_versionstring: package %s doesn't match pattern" % (package))
            return ('', '')
        bb.debug(3, "latest_versionstring, regex: %s" % (package_regex.pattern))

        uri = ""
        regex_uri = d.getVar("UPSTREAM_CHECK_URI")
        if not regex_uri:
            path = ud.path.split(package)[0]

            # search for version matches in folders inside the path, like:
            # "5.7" in http://download.gnome.org/sources/${PN}/5.7/${PN}-${PV}.tar.gz
            dirver_regex = re.compile(r"(?P<dirver>[^/]*(\d+\.)*\d+([-_]r\d+)*)/")
            m = dirver_regex.findall(path)
            if m:
                pn = d.getVar('PN')
                dirver = m[-1][0]

                dirver_pn_regex = re.compile(r"%s\d?" % (re.escape(pn)))
                if not dirver_pn_regex.search(dirver):
                    return (self._check_latest_version_by_dir(dirver,
                        package, package_regex, current_version, ud, d), '')

            uri = bb.fetch.encodeurl([ud.type, ud.host, path, ud.user, ud.pswd, {}])
        else:
            uri = regex_uri

        return (self._check_latest_version(uri, package, package_regex,
                current_version, ud, d), '')