utils.py 54 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502150315041505150615071508150915101511151215131514151515161517151815191520152115221523152415251526152715281529153015311532153315341535153615371538153915401541154215431544154515461547154815491550155115521553155415551556155715581559156015611562156315641565156615671568156915701571157215731574157515761577157815791580158115821583158415851586158715881589159015911592159315941595159615971598159916001601160216031604160516061607160816091610161116121613
  1. """
  2. BitBake Utility Functions
  3. """
  4. # Copyright (C) 2004 Michael Lauer
  5. #
  6. # SPDX-License-Identifier: GPL-2.0-only
  7. #
  8. import re, fcntl, os, string, stat, shutil, time
  9. import sys
  10. import errno
  11. import logging
  12. import bb
  13. import bb.msg
  14. import multiprocessing
  15. import fcntl
  16. import importlib
  17. from importlib import machinery
  18. import itertools
  19. import subprocess
  20. import glob
  21. import fnmatch
  22. import traceback
  23. import errno
  24. import signal
  25. import collections
  26. import copy
  27. from subprocess import getstatusoutput
  28. from contextlib import contextmanager
  29. from ctypes import cdll
  30. logger = logging.getLogger("BitBake.Util")
  31. python_extensions = importlib.machinery.all_suffixes()
  32. def clean_context():
  33. return {
  34. "os": os,
  35. "bb": bb,
  36. "time": time,
  37. }
  38. def get_context():
  39. return _context
  40. def set_context(ctx):
  41. _context = ctx
# Shared context used as the globals for better_exec/better_eval;
# initialised to the minimal clean set and replaceable via set_context().
_context = clean_context()
  44. class VersionStringException(Exception):
  45. """Exception raised when an invalid version specification is found"""
  46. def explode_version(s):
  47. r = []
  48. alpha_regexp = re.compile(r'^([a-zA-Z]+)(.*)$')
  49. numeric_regexp = re.compile(r'^(\d+)(.*)$')
  50. while (s != ''):
  51. if s[0] in string.digits:
  52. m = numeric_regexp.match(s)
  53. r.append((0, int(m.group(1))))
  54. s = m.group(2)
  55. continue
  56. if s[0] in string.ascii_letters:
  57. m = alpha_regexp.match(s)
  58. r.append((1, m.group(1)))
  59. s = m.group(2)
  60. continue
  61. if s[0] == '~':
  62. r.append((-1, s[0]))
  63. else:
  64. r.append((2, s[0]))
  65. s = s[1:]
  66. return r
  67. def split_version(s):
  68. """Split a version string into its constituent parts (PE, PV, PR)"""
  69. s = s.strip(" <>=")
  70. e = 0
  71. if s.count(':'):
  72. e = int(s.split(":")[0])
  73. s = s.split(":")[1]
  74. r = ""
  75. if s.count('-'):
  76. r = s.rsplit("-", 1)[1]
  77. s = s.rsplit("-", 1)[0]
  78. v = s
  79. return (e, v, r)
  80. def vercmp_part(a, b):
  81. va = explode_version(a)
  82. vb = explode_version(b)
  83. while True:
  84. if va == []:
  85. (oa, ca) = (0, None)
  86. else:
  87. (oa, ca) = va.pop(0)
  88. if vb == []:
  89. (ob, cb) = (0, None)
  90. else:
  91. (ob, cb) = vb.pop(0)
  92. if (oa, ca) == (0, None) and (ob, cb) == (0, None):
  93. return 0
  94. if oa < ob:
  95. return -1
  96. elif oa > ob:
  97. return 1
  98. elif ca is None:
  99. return -1
  100. elif cb is None:
  101. return 1
  102. elif ca < cb:
  103. return -1
  104. elif ca > cb:
  105. return 1
  106. def vercmp(ta, tb):
  107. (ea, va, ra) = ta
  108. (eb, vb, rb) = tb
  109. r = int(ea or 0) - int(eb or 0)
  110. if (r == 0):
  111. r = vercmp_part(va, vb)
  112. if (r == 0):
  113. r = vercmp_part(ra, rb)
  114. return r
  115. def vercmp_string(a, b):
  116. ta = split_version(a)
  117. tb = split_version(b)
  118. return vercmp(ta, tb)
  119. def vercmp_string_op(a, b, op):
  120. """
  121. Compare two versions and check if the specified comparison operator matches the result of the comparison.
  122. This function is fairly liberal about what operators it will accept since there are a variety of styles
  123. depending on the context.
  124. """
  125. res = vercmp_string(a, b)
  126. if op in ('=', '=='):
  127. return res == 0
  128. elif op == '<=':
  129. return res <= 0
  130. elif op == '>=':
  131. return res >= 0
  132. elif op in ('>', '>>'):
  133. return res > 0
  134. elif op in ('<', '<<'):
  135. return res < 0
  136. elif op == '!=':
  137. return res != 0
  138. else:
  139. raise VersionStringException('Unsupported comparison operator "%s"' % op)
  140. def explode_deps(s):
  141. """
  142. Take an RDEPENDS style string of format:
  143. "DEPEND1 (optional version) DEPEND2 (optional version) ..."
  144. and return a list of dependencies.
  145. Version information is ignored.
  146. """
  147. r = []
  148. l = s.split()
  149. flag = False
  150. for i in l:
  151. if i[0] == '(':
  152. flag = True
  153. #j = []
  154. if not flag:
  155. r.append(i)
  156. #else:
  157. # j.append(i)
  158. if flag and i.endswith(')'):
  159. flag = False
  160. # Ignore version
  161. #r[-1] += ' ' + ' '.join(j)
  162. return r
def explode_dep_versions2(s, *, sort=True):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary of dependencies and versions.

    Each value is a list of "<op> <version>" strings (empty list when no
    constraint was given). Raises VersionStringException for a version
    group with a missing/invalid operator.
    """
    r = collections.OrderedDict()
    # Commas are treated as pure separators
    l = s.replace(",", "").split()
    lastdep = None
    lastcmp = ""
    lastver = ""
    incmp = False      # saw '(' - next non-empty token carries the operator
    inversion = False  # currently accumulating a version constraint
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue
        if incmp:
            incmp = False
            inversion = True
            # This list is based on behavior and supported comparisons from deb, opkg and rpm.
            #
            # Even though =<, <<, ==, !=, =>, and >> may not be supported,
            # we list each possibly valid item.
            # The build system is responsible for validation of what it supports.
            if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
                lastcmp = i[0:2]
                i = i[2:]
            elif i.startswith(('<', '>', '=')):
                lastcmp = i[0:1]
                i = i[1:]
            else:
                # This is an unsupported case!
                raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
                # NOTE(review): the two statements below sit after the raise
                # and are therefore unreachable dead code.
                lastcmp = (i or "")
                i = ""
        # NOTE(review): str.strip() returns a new string and the result is
        # discarded, so this line is a no-op — presumably "i = i.strip()"
        # was intended; confirm before changing.
        i.strip()
        if not i:
            continue
        if inversion:
            if i.endswith(')'):
                # Closing parenthesis terminates the constraint
                i = i[:-1] or ""
                inversion = False
                if lastver and i:
                    lastver += " "
            if i:
                lastver += i
                if lastdep not in r:
                    r[lastdep] = []
                r[lastdep].append(lastcmp + " " + lastver)
            continue
        #if not inversion:
        # Plain token: start a new dependency entry
        lastdep = i
        lastver = ""
        lastcmp = ""
        if not (i in r and r[i]):
            r[lastdep] = []
    if sort:
        r = collections.OrderedDict(sorted(r.items(), key=lambda x: x[0]))
    return r
  225. def explode_dep_versions(s):
  226. r = explode_dep_versions2(s)
  227. for d in r:
  228. if not r[d]:
  229. r[d] = None
  230. continue
  231. if len(r[d]) > 1:
  232. bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (d, s))
  233. r[d] = r[d][0]
  234. return r
  235. def join_deps(deps, commasep=True):
  236. """
  237. Take the result from explode_dep_versions and generate a dependency string
  238. """
  239. result = []
  240. for dep in deps:
  241. if deps[dep]:
  242. if isinstance(deps[dep], list):
  243. for v in deps[dep]:
  244. result.append(dep + " (" + v + ")")
  245. else:
  246. result.append(dep + " (" + deps[dep] + ")")
  247. else:
  248. result.append(dep)
  249. if commasep:
  250. return ", ".join(result)
  251. else:
  252. return " ".join(result)
  253. def _print_trace(body, line):
  254. """
  255. Print the Environment of a Text Body
  256. """
  257. error = []
  258. # print the environment of the method
  259. min_line = max(1, line-4)
  260. max_line = min(line + 4, len(body))
  261. for i in range(min_line, max_line + 1):
  262. if line == i:
  263. error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
  264. else:
  265. error.append(' %.4d:%s' % (i, body[i-1].rstrip()))
  266. return error
  267. def better_compile(text, file, realfile, mode = "exec", lineno = 0):
  268. """
  269. A better compile method. This method
  270. will print the offending lines.
  271. """
  272. try:
  273. cache = bb.methodpool.compile_cache(text)
  274. if cache:
  275. return cache
  276. # We can't add to the linenumbers for compile, we can pad to the correct number of blank lines though
  277. text2 = "\n" * int(lineno) + text
  278. code = compile(text2, realfile, mode)
  279. bb.methodpool.compile_cache_add(text, code)
  280. return code
  281. except Exception as e:
  282. error = []
  283. # split the text into lines again
  284. body = text.split('\n')
  285. error.append("Error in compiling python function in %s, line %s:\n" % (realfile, e.lineno))
  286. if hasattr(e, "lineno"):
  287. error.append("The code lines resulting in this error were:")
  288. # e.lineno: line's position in reaflile
  289. # lineno: function name's "position -1" in realfile
  290. # e.lineno - lineno: line's relative position in function
  291. error.extend(_print_trace(body, e.lineno - lineno))
  292. else:
  293. error.append("The function causing this error was:")
  294. for line in body:
  295. error.append(line)
  296. error.append("%s: %s" % (e.__class__.__name__, str(e)))
  297. logger.error("\n".join(error))
  298. e = bb.BBHandledException(e)
  299. raise e
def _print_exception(t, value, tb, realfile, text, context):
    """Log a detailed report for an exception raised while executing the
    python function *text* (from *realfile*), including source context for
    each traceback frame that can be resolved."""
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)
        # Strip 'us' from the stack (better_exec call) unless that was where the
        # error came from
        if tb.tb_next is not None:
            tb = tb.tb_next
        textarray = text.split('\n')
        linefailed = tb.tb_lineno
        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))
        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                    error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])
            # NOTE(review): this re-reads tb.tb_next rather than advancing
            # nexttb down the chain; the loop still terminates via the
            # (level+1) < len(tbextract) bound — confirm whether
            # "nexttb = nexttb.tb_next" was intended.
            nexttb = tb.tb_next
            level = level + 1
        error.append("Exception: %s" % ''.join(exception))
        # If the exception is from spwaning a task, let's be helpful and display
        # the output (which hopefully includes stderr).
        if isinstance(value, subprocess.CalledProcessError) and value.output:
            error.append("Subprocess output:")
            error.append(value.output.decode("utf-8", errors="ignore"))
    finally:
        # Always emit whatever was collected, even if formatting itself failed
        logger.error("\n".join(error))
def better_exec(code, context, text = None, realfile = "<code>", pythonexception=False):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.

    code may be a string (compiled via better_compile) or an existing code
    object. Exceptions bitbake already reported pass straight through; with
    pythonexception=True the raw exception propagates, otherwise it is
    logged via _print_exception and re-raised as bb.BBHandledException.
    """
    import bb.parse
    if not text:
        text = code
    # Strings need compiling first; code objects carry co_filename already
    if not hasattr(code, "co_filename"):
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.data_smart.ExpansionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        if pythonexception:
            raise
        (t, value, tb) = sys.exc_info()
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e:
            # Never let the reporter mask the original failure entirely
            logger.error("Exception handler error: %s" % str(e))
        e = bb.BBHandledException(e)
        raise e
  371. def simple_exec(code, context):
  372. exec(code, get_context(), context)
  373. def better_eval(source, locals, extraglobals = None):
  374. ctx = get_context()
  375. if extraglobals:
  376. ctx = copy.copy(ctx)
  377. for g in extraglobals:
  378. ctx[g] = extraglobals[g]
  379. return eval(source, ctx, locals)
  380. @contextmanager
  381. def fileslocked(files):
  382. """Context manager for locking and unlocking file locks."""
  383. locks = []
  384. if files:
  385. for lockfile in files:
  386. locks.append(bb.utils.lockfile(lockfile))
  387. try:
  388. yield
  389. finally:
  390. for lock in locks:
  391. bb.utils.unlockfile(lock)
  392. @contextmanager
  393. def timeout(seconds):
  394. def timeout_handler(signum, frame):
  395. pass
  396. original_handler = signal.signal(signal.SIGALRM, timeout_handler)
  397. try:
  398. signal.alarm(seconds)
  399. yield
  400. finally:
  401. signal.alarm(0)
  402. signal.signal(signal.SIGALRM, original_handler)
  403. def lockfile(name, shared=False, retry=True, block=False):
  404. """
  405. Use the specified file as a lock file, return when the lock has
  406. been acquired. Returns a variable to pass to unlockfile().
  407. Parameters:
  408. retry: True to re-try locking if it fails, False otherwise
  409. block: True to block until the lock succeeds, False otherwise
  410. The retry and block parameters are kind of equivalent unless you
  411. consider the possibility of sending a signal to the process to break
  412. out - at which point you want block=True rather than retry=True.
  413. """
  414. dirname = os.path.dirname(name)
  415. mkdirhier(dirname)
  416. if not os.access(dirname, os.W_OK):
  417. logger.error("Unable to acquire lock '%s', directory is not writable",
  418. name)
  419. sys.exit(1)
  420. op = fcntl.LOCK_EX
  421. if shared:
  422. op = fcntl.LOCK_SH
  423. if not retry and not block:
  424. op = op | fcntl.LOCK_NB
  425. while True:
  426. # If we leave the lockfiles lying around there is no problem
  427. # but we should clean up after ourselves. This gives potential
  428. # for races though. To work around this, when we acquire the lock
  429. # we check the file we locked was still the lock file on disk.
  430. # by comparing inode numbers. If they don't match or the lockfile
  431. # no longer exists, we start again.
  432. # This implementation is unfair since the last person to request the
  433. # lock is the most likely to win it.
  434. try:
  435. lf = open(name, 'a+')
  436. fileno = lf.fileno()
  437. fcntl.flock(fileno, op)
  438. statinfo = os.fstat(fileno)
  439. if os.path.exists(lf.name):
  440. statinfo2 = os.stat(lf.name)
  441. if statinfo.st_ino == statinfo2.st_ino:
  442. return lf
  443. lf.close()
  444. except OSError as e:
  445. if e.errno == errno.EACCES:
  446. logger.error("Unable to acquire lock '%s', %s",
  447. e.strerror, name)
  448. sys.exit(1)
  449. try:
  450. lf.close()
  451. except Exception:
  452. pass
  453. pass
  454. if not retry:
  455. return None
  456. def unlockfile(lf):
  457. """
  458. Unlock a file locked using lockfile()
  459. """
  460. try:
  461. # If we had a shared lock, we need to promote to exclusive before
  462. # removing the lockfile. Attempt this, ignore failures.
  463. fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
  464. os.unlink(lf.name)
  465. except (IOError, OSError):
  466. pass
  467. fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
  468. lf.close()
  469. def _hasher(method, filename):
  470. import mmap
  471. with open(filename, "rb") as f:
  472. try:
  473. with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
  474. for chunk in iter(lambda: mm.read(8192), b''):
  475. method.update(chunk)
  476. except ValueError:
  477. # You can't mmap() an empty file so silence this exception
  478. pass
  479. return method.hexdigest()
  480. def md5_file(filename):
  481. """
  482. Return the hex string representation of the MD5 checksum of filename.
  483. """
  484. import hashlib
  485. return _hasher(hashlib.md5(), filename)
  486. def sha256_file(filename):
  487. """
  488. Return the hex string representation of the 256-bit SHA checksum of
  489. filename.
  490. """
  491. import hashlib
  492. return _hasher(hashlib.sha256(), filename)
  493. def sha1_file(filename):
  494. """
  495. Return the hex string representation of the SHA1 checksum of the filename
  496. """
  497. import hashlib
  498. return _hasher(hashlib.sha1(), filename)
  499. def sha384_file(filename):
  500. """
  501. Return the hex string representation of the SHA384 checksum of the filename
  502. """
  503. import hashlib
  504. return _hasher(hashlib.sha384(), filename)
  505. def sha512_file(filename):
  506. """
  507. Return the hex string representation of the SHA512 checksum of the filename
  508. """
  509. import hashlib
  510. return _hasher(hashlib.sha512(), filename)
  511. def preserved_envvars_exported():
  512. """Variables which are taken from the environment and placed in and exported
  513. from the metadata"""
  514. return [
  515. 'BB_TASKHASH',
  516. 'HOME',
  517. 'LOGNAME',
  518. 'PATH',
  519. 'PWD',
  520. 'SHELL',
  521. 'TERM',
  522. 'USER',
  523. 'LC_ALL',
  524. 'BBSERVER',
  525. ]
  526. def preserved_envvars():
  527. """Variables which are taken from the environment and placed in the metadata"""
  528. v = [
  529. 'BBPATH',
  530. 'BB_PRESERVE_ENV',
  531. 'BB_ENV_WHITELIST',
  532. 'BB_ENV_EXTRAWHITE',
  533. ]
  534. return v + preserved_envvars_exported()
def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.

    Returns a dict mapping each removed variable name to its old value.
    """
    removed_vars = {}
    # Snapshot the keys since we delete from os.environ while iterating
    for key in list(os.environ):
        if key in good_vars:
            continue
        removed_vars[key] = os.environ[key]
        del os.environ[key]
    # If we spawn a python process, we need to have a UTF-8 locale, else python's file
    # access methods will use ascii. You can't change that mode once the interpreter is
    # started so we have to ensure a locale is set. Ideally we'd use C.UTF-8 but not all
    # distros support that and we need to set something.
    os.environ["LC_ALL"] = "en_US.UTF-8"
    if removed_vars:
        # The leading 1 is the bb.msg debug level, not part of the message
        logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))
    return removed_vars
  554. def approved_variables():
  555. """
  556. Determine and return the list of whitelisted variables which are approved
  557. to remain in the environment.
  558. """
  559. if 'BB_PRESERVE_ENV' in os.environ:
  560. return os.environ.keys()
  561. approved = []
  562. if 'BB_ENV_WHITELIST' in os.environ:
  563. approved = os.environ['BB_ENV_WHITELIST'].split()
  564. approved.extend(['BB_ENV_WHITELIST'])
  565. else:
  566. approved = preserved_envvars()
  567. if 'BB_ENV_EXTRAWHITE' in os.environ:
  568. approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
  569. if 'BB_ENV_EXTRAWHITE' not in approved:
  570. approved.extend(['BB_ENV_EXTRAWHITE'])
  571. return approved
  572. def clean_environment():
  573. """
  574. Clean up any spurious environment variables. This will remove any
  575. variables the user hasn't chosen to preserve.
  576. """
  577. if 'BB_PRESERVE_ENV' not in os.environ:
  578. good_vars = approved_variables()
  579. return filter_environment(good_vars)
  580. return {}
  581. def empty_environment():
  582. """
  583. Remove all variables from the environment.
  584. """
  585. for s in list(os.environ.keys()):
  586. os.unsetenv(s)
  587. del os.environ[s]
def build_environment(d):
    """
    Build an environment from all exported variables.

    Copies every variable in datastore *d* carrying the "export" flag into
    os.environ (empty string when the variable expands to None).
    """
    import bb.data
    for var in bb.data.keys(d):
        export = d.getVarFlag(var, "export", False)
        if export:
            os.environ[var] = d.getVar(var) or ""
  597. def _check_unsafe_delete_path(path):
  598. """
  599. Basic safeguard against recursively deleting something we shouldn't. If it returns True,
  600. the caller should raise an exception with an appropriate message.
  601. NOTE: This is NOT meant to be a security mechanism - just a guard against silly mistakes
  602. with potentially disastrous results.
  603. """
  604. extra = ''
  605. # HOME might not be /home/something, so in case we can get it, check against it
  606. homedir = os.environ.get('HOME', '')
  607. if homedir:
  608. extra = '|%s' % homedir
  609. if re.match('(/|//|/home|/home/[^/]*%s)$' % extra, os.path.abspath(path)):
  610. return True
  611. return False
  612. def remove(path, recurse=False, ionice=False):
  613. """Equivalent to rm -f or rm -rf"""
  614. if not path:
  615. return
  616. if recurse:
  617. for name in glob.glob(path):
  618. if _check_unsafe_delete_path(path):
  619. raise Exception('bb.utils.remove: called with dangerous path "%s" and recurse=True, refusing to delete!' % path)
  620. # shutil.rmtree(name) would be ideal but its too slow
  621. cmd = []
  622. if ionice:
  623. cmd = ['ionice', '-c', '3']
  624. subprocess.check_call(cmd + ['rm', '-rf'] + glob.glob(path))
  625. return
  626. for name in glob.glob(path):
  627. try:
  628. os.unlink(name)
  629. except OSError as exc:
  630. if exc.errno != errno.ENOENT:
  631. raise
  632. def prunedir(topdir, ionice=False):
  633. # Delete everything reachable from the directory named in 'topdir'.
  634. # CAUTION: This is dangerous!
  635. if _check_unsafe_delete_path(topdir):
  636. raise Exception('bb.utils.prunedir: called with dangerous path "%s", refusing to delete!' % topdir)
  637. remove(topdir, recurse=True, ionice=ionice)
  638. #
  639. # Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
  640. # but thats possibly insane and suffixes is probably going to be small
  641. #
  642. def prune_suffix(var, suffixes, d):
  643. # See if var ends with any of the suffixes listed and
  644. # remove it if found
  645. for suffix in suffixes:
  646. if suffix and var.endswith(suffix):
  647. return var[:-len(suffix)]
  648. return var
  649. def mkdirhier(directory):
  650. """Create a directory like 'mkdir -p', but does not complain if
  651. directory already exists like os.makedirs
  652. """
  653. try:
  654. os.makedirs(directory)
  655. except OSError as e:
  656. if e.errno != errno.EEXIST or not os.path.isdir(directory):
  657. raise e
def movefile(src, dest, newmtime = None, sstat = None):
    """Moves a file from src to dest, preserving all permissions and
    attributes; mtime will be preserved even when moving across
    filesystems. Returns true on success and false on failure. Move is
    atomic.

    Returns the resulting mtime (or the stat of a moved symlink) on
    success, None on failure. sstat may be passed in to avoid re-statting
    src.
    """
    #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
    try:
        if not sstat:
            sstat = os.lstat(src)
    except Exception as e:
        print("movefile: Stating source file failed...", e)
        return None
    destexists = 1
    try:
        dstat = os.lstat(dest)
    except:
        # dest missing: stat its parent so the later same-device check works
        dstat = os.lstat(os.path.dirname(dest))
        destexists = 0
    if destexists:
        if stat.S_ISLNK(dstat[stat.ST_MODE]):
            # Remove a pre-existing symlink at dest; best-effort
            try:
                os.unlink(dest)
                destexists = 0
            except Exception as e:
                pass
    if stat.S_ISLNK(sstat[stat.ST_MODE]):
        # Source is a symlink: recreate it at dest rather than copying data
        try:
            target = os.readlink(src)
            if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
                os.unlink(dest)
            os.symlink(target, dest)
            #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
            os.unlink(src)
            return os.lstat(dest)
        except Exception as e:
            print("movefile: failed to properly create symlink:", dest, "->", target, e)
            return None
    renamefailed = 1
    # os.rename needs to know the dest path ending with file name
    # so append the file name to a path only if it's a dir specified
    srcfname = os.path.basename(src)
    destpath = os.path.join(dest, srcfname) if os.path.isdir(dest) \
                else dest
    if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
        # Same filesystem: a plain rename is atomic
        try:
            os.rename(src, destpath)
            renamefailed = 0
        except Exception as e:
            # NOTE(review): catching Exception but reading e.errno — a
            # non-OSError here would raise AttributeError; confirm whether
            # "except OSError" was intended.
            if e.errno != errno.EXDEV:
                # Some random error.
                print("movefile: Failed to move", src, "to", dest, e)
                return None
            # Invalid cross-device-link 'bind' mounted or actually Cross-Device
    if renamefailed:
        didcopy = 0
        if stat.S_ISREG(sstat[stat.ST_MODE]):
            try: # For safety copy then move it over.
                shutil.copyfile(src, destpath + "#new")
                os.rename(destpath + "#new", destpath)
                didcopy = 1
            except Exception as e:
                print('movefile: copy', src, '->', dest, 'failed.', e)
                return None
        else:
            #we don't yet handle special, so we need to fall back to /bin/mv
            a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
            if a[0] != 0:
                print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
                return None # failure
        try:
            if didcopy:
                os.lchown(destpath, sstat[stat.ST_UID], sstat[stat.ST_GID])
                os.chmod(destpath, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
            os.unlink(src)
        except Exception as e:
            print("movefile: Failed to chown/chmod/unlink", dest, e)
            return None
    if newmtime:
        os.utime(destpath, (newmtime, newmtime))
    else:
        # Preserve the source timestamps and report the mtime we kept
        os.utime(destpath, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
        newmtime = sstat[stat.ST_MTIME]
    return newmtime
  742. def copyfile(src, dest, newmtime = None, sstat = None):
  743. """
  744. Copies a file from src to dest, preserving all permissions and
  745. attributes; mtime will be preserved even when moving across
  746. filesystems. Returns true on success and false on failure.
  747. """
  748. #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
  749. try:
  750. if not sstat:
  751. sstat = os.lstat(src)
  752. except Exception as e:
  753. logger.warning("copyfile: stat of %s failed (%s)" % (src, e))
  754. return False
  755. destexists = 1
  756. try:
  757. dstat = os.lstat(dest)
  758. except:
  759. dstat = os.lstat(os.path.dirname(dest))
  760. destexists = 0
  761. if destexists:
  762. if stat.S_ISLNK(dstat[stat.ST_MODE]):
  763. try:
  764. os.unlink(dest)
  765. destexists = 0
  766. except Exception as e:
  767. pass
  768. if stat.S_ISLNK(sstat[stat.ST_MODE]):
  769. try:
  770. target = os.readlink(src)
  771. if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
  772. os.unlink(dest)
  773. os.symlink(target, dest)
  774. #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
  775. return os.lstat(dest)
  776. except Exception as e:
  777. logger.warning("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
  778. return False
  779. if stat.S_ISREG(sstat[stat.ST_MODE]):
  780. try:
  781. srcchown = False
  782. if not os.access(src, os.R_OK):
  783. # Make sure we can read it
  784. srcchown = True
  785. os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)
  786. # For safety copy then move it over.
  787. shutil.copyfile(src, dest + "#new")
  788. os.rename(dest + "#new", dest)
  789. except Exception as e:
  790. logger.warning("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
  791. return False
  792. finally:
  793. if srcchown:
  794. os.chmod(src, sstat[stat.ST_MODE])
  795. os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
  796. else:
  797. #we don't yet handle special, so we need to fall back to /bin/mv
  798. a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
  799. if a[0] != 0:
  800. logger.warning("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
  801. return False # failure
  802. try:
  803. os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
  804. os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
  805. except Exception as e:
  806. logger.warning("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
  807. return False
  808. if newmtime:
  809. os.utime(dest, (newmtime, newmtime))
  810. else:
  811. os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
  812. newmtime = sstat[stat.ST_MTIME]
  813. return newmtime
  814. def break_hardlinks(src, sstat = None):
  815. """
  816. Ensures src is the only hardlink to this file. Other hardlinks,
  817. if any, are not affected (other than in their st_nlink value, of
  818. course). Returns true on success and false on failure.
  819. """
  820. try:
  821. if not sstat:
  822. sstat = os.lstat(src)
  823. except Exception as e:
  824. logger.warning("break_hardlinks: stat of %s failed (%s)" % (src, e))
  825. return False
  826. if sstat[stat.ST_NLINK] == 1:
  827. return True
  828. return copyfile(src, src, sstat=sstat)
  829. def which(path, item, direction = 0, history = False, executable=False):
  830. """
  831. Locate `item` in the list of paths `path` (colon separated string like $PATH).
  832. If `direction` is non-zero then the list is reversed.
  833. If `history` is True then the list of candidates also returned as result,history.
  834. If `executable` is True then the candidate has to be an executable file,
  835. otherwise the candidate simply has to exist.
  836. """
  837. if executable:
  838. is_candidate = lambda p: os.path.isfile(p) and os.access(p, os.X_OK)
  839. else:
  840. is_candidate = lambda p: os.path.exists(p)
  841. hist = []
  842. paths = (path or "").split(':')
  843. if direction != 0:
  844. paths.reverse()
  845. for p in paths:
  846. next = os.path.join(p, item)
  847. hist.append(next)
  848. if is_candidate(next):
  849. if not os.path.isabs(next):
  850. next = os.path.abspath(next)
  851. if history:
  852. return next, hist
  853. return next
  854. if history:
  855. return "", hist
  856. return ""
  857. def to_boolean(string, default=None):
  858. if not string:
  859. return default
  860. normalized = string.lower()
  861. if normalized in ("y", "yes", "1", "true"):
  862. return True
  863. elif normalized in ("n", "no", "0", "false"):
  864. return False
  865. else:
  866. raise ValueError("Invalid value for to_boolean: %s" % string)
  867. def contains(variable, checkvalues, truevalue, falsevalue, d):
  868. """Check if a variable contains all the values specified.
  869. Arguments:
  870. variable -- the variable name. This will be fetched and expanded (using
  871. d.getVar(variable)) and then split into a set().
  872. checkvalues -- if this is a string it is split on whitespace into a set(),
  873. otherwise coerced directly into a set().
  874. truevalue -- the value to return if checkvalues is a subset of variable.
  875. falsevalue -- the value to return if variable is empty or if checkvalues is
  876. not a subset of variable.
  877. d -- the data store.
  878. """
  879. val = d.getVar(variable)
  880. if not val:
  881. return falsevalue
  882. val = set(val.split())
  883. if isinstance(checkvalues, str):
  884. checkvalues = set(checkvalues.split())
  885. else:
  886. checkvalues = set(checkvalues)
  887. if checkvalues.issubset(val):
  888. return truevalue
  889. return falsevalue
  890. def contains_any(variable, checkvalues, truevalue, falsevalue, d):
  891. val = d.getVar(variable)
  892. if not val:
  893. return falsevalue
  894. val = set(val.split())
  895. if isinstance(checkvalues, str):
  896. checkvalues = set(checkvalues.split())
  897. else:
  898. checkvalues = set(checkvalues)
  899. if checkvalues & val:
  900. return truevalue
  901. return falsevalue
  902. def filter(variable, checkvalues, d):
  903. """Return all words in the variable that are present in the checkvalues.
  904. Arguments:
  905. variable -- the variable name. This will be fetched and expanded (using
  906. d.getVar(variable)) and then split into a set().
  907. checkvalues -- if this is a string it is split on whitespace into a set(),
  908. otherwise coerced directly into a set().
  909. d -- the data store.
  910. """
  911. val = d.getVar(variable)
  912. if not val:
  913. return ''
  914. val = set(val.split())
  915. if isinstance(checkvalues, str):
  916. checkvalues = set(checkvalues.split())
  917. else:
  918. checkvalues = set(checkvalues)
  919. return ' '.join(sorted(checkvalues & val))
  920. def get_referenced_vars(start_expr, d):
  921. """
  922. :return: names of vars referenced in start_expr (recursively), in quasi-BFS order (variables within the same level
  923. are ordered arbitrarily)
  924. """
  925. seen = set()
  926. ret = []
  927. # The first entry in the queue is the unexpanded start expression
  928. queue = collections.deque([start_expr])
  929. # Subsequent entries will be variable names, so we need to track whether or not entry requires getVar
  930. is_first = True
  931. empty_data = bb.data.init()
  932. while queue:
  933. entry = queue.popleft()
  934. if is_first:
  935. # Entry is the start expression - no expansion needed
  936. is_first = False
  937. expression = entry
  938. else:
  939. # This is a variable name - need to get the value
  940. expression = d.getVar(entry, False)
  941. ret.append(entry)
  942. # expandWithRefs is how we actually get the referenced variables in the expression. We call it using an empty
  943. # data store because we only want the variables directly used in the expression. It returns a set, which is what
  944. # dooms us to only ever be "quasi-BFS" rather than full BFS.
  945. new_vars = empty_data.expandWithRefs(expression, None).references - set(seen)
  946. queue.extend(new_vars)
  947. seen.update(new_vars)
  948. return ret
  949. def cpu_count():
  950. return multiprocessing.cpu_count()
  951. def nonblockingfd(fd):
  952. fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
  953. def process_profilelog(fn, pout = None):
  954. # Either call with a list of filenames and set pout or a filename and optionally pout.
  955. if not pout:
  956. pout = fn + '.processed'
  957. pout = open(pout, 'w')
  958. import pstats
  959. if isinstance(fn, list):
  960. p = pstats.Stats(*fn, stream=pout)
  961. else:
  962. p = pstats.Stats(fn, stream=pout)
  963. p.sort_stats('time')
  964. p.print_stats()
  965. p.print_callers()
  966. p.sort_stats('cumulative')
  967. p.print_stats()
  968. pout.flush()
  969. pout.close()
#
# Was present to work around multiprocessing pool bugs in python < 2.7.3
#
def multiprocessingpool(*args, **kwargs):
    """Create a multiprocessing.Pool whose imap iterators always wait with a
    finite (huge) timeout, so that signals such as SIGINT/SIGTERM are
    processed promptly instead of being deferred until the work completes."""
    import multiprocessing.pool
    #import multiprocessing.util
    #multiprocessing.util.log_to_stderr(10)
    # Deal with a multiprocessing bug where signals to the processes would be delayed until the work
    # completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
    def wrapper(func):
        def wrap(self, timeout=None):
            # Substitute an effectively-infinite timeout for None so the
            # wait remains interruptible by signals.
            return func(self, timeout=timeout if timeout is not None else 1e100)
        return wrap
    # NOTE(review): this patches IMapIterator.next on the class itself, so it
    # affects all pools created in this process, not just the one returned here.
    multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)

    return multiprocessing.Pool(*args, **kwargs)
  985. def exec_flat_python_func(func, *args, **kwargs):
  986. """Execute a flat python function (defined with def funcname(args):...)"""
  987. # Prepare a small piece of python code which calls the requested function
  988. # To do this we need to prepare two things - a set of variables we can use to pass
  989. # the values of arguments into the calling function, and the list of arguments for
  990. # the function being called
  991. context = {}
  992. funcargs = []
  993. # Handle unnamed arguments
  994. aidx = 1
  995. for arg in args:
  996. argname = 'arg_%s' % aidx
  997. context[argname] = arg
  998. funcargs.append(argname)
  999. aidx += 1
  1000. # Handle keyword arguments
  1001. context.update(kwargs)
  1002. funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.keys()])
  1003. code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
  1004. comp = bb.utils.better_compile(code, '<string>', '<string>')
  1005. bb.utils.better_exec(comp, context, code, '<string>')
  1006. return context['retval']
def edit_metadata(meta_lines, variables, varfunc, match_overrides=False):
    """Edit lines from a recipe or config file and modify one or more
    specified variable values set in the file using a specified callback
    function. Lines are expected to have trailing newlines.

    Parameters:
        meta_lines: lines from the file; can be a list or an iterable
            (e.g. file pointer)
        variables: a list of variable names to look for. Functions
            may also be specified, but must be specified with '()' at
            the end of the name. Note that the function doesn't have
            any intrinsic understanding of _append, _prepend, _remove,
            or overrides, so these are considered as part of the name.
            These values go into a regular expression, so regular
            expression syntax is allowed.
        varfunc: callback function called for every variable matching
            one of the entries in the variables parameter. The function
            should take four arguments:
                varname: name of variable matched
                origvalue: current value in file
                op: the operator (e.g. '+=')
                newlines: list of lines up to this point. You can use
                    this to prepend lines before this variable setting
                    if you wish.
            and should return a four-element tuple:
                newvalue: new value to substitute in, or None to drop
                    the variable setting entirely. (If the removal
                    results in two consecutive blank lines, one of the
                    blank lines will also be dropped).
                newop: the operator to use - if you specify None here,
                    the original operation will be used.
                indent: number of spaces to indent multi-line entries,
                    or -1 to indent up to the level of the assignment
                    and opening quote, or a string to use as the indent.
                minbreak: True to allow the first element of a
                    multi-line value to continue on the same line as
                    the assignment, False to indent before the first
                    element.
            To clarify, if you wish not to change the value, then you
            would return like this: return origvalue, None, 0, True
        match_overrides: True to match items with _overrides on the end,
            False otherwise
    Returns a tuple:
        updated:
            True if changes were made, False otherwise.
        newlines:
            Lines after processing
    """
    # Precompile one matching regex per requested variable/function name.
    var_res = {}
    if match_overrides:
        override_re = r'(_[a-zA-Z0-9-_$(){}]+)?'
    else:
        override_re = ''
    for var in variables:
        # NOTE(review): in these raw strings '\\t' is a regex that matches a
        # literal backslash or the letter 't', not a tab - possibly intended
        # to be '\t'; confirm against upstream before changing.
        if var.endswith('()'):
            # Shell/python function definition: NAME() {
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*\([ \\t]*\)[ \\t]*{' % (var[:-2].rstrip(), override_re))
        else:
            # Variable assignment: NAME ?=/+=/:=/= "..." or '...'
            var_res[var] = re.compile(r'^(%s%s)[ \\t]*[?+:.]*=[+.]*[ \\t]*(["\'])' % (var, override_re))

    updated = False
    varset_start = ''   # text before the opening quote/brace, e.g. 'FOO +='
    varlines = []       # original lines of the assignment being accumulated
    newlines = []       # output lines
    in_var = None       # name of the variable currently being accumulated
    full_value = ''     # accumulated value of the current assignment
    var_end = ''        # terminator: closing quote char, or '}' for functions

    def handle_var_end():
        # Called once a complete assignment/function has been accumulated;
        # invokes the callback and emits the (possibly rewritten) setting.
        # Returns True if the output differs from the input.
        prerun_newlines = newlines[:]
        op = varset_start[len(in_var):].strip()
        (newvalue, newop, indent, minbreak) = varfunc(in_var, full_value, op, newlines)
        # The callback may have appended lines of its own.
        changed = (prerun_newlines != newlines)

        if newvalue is None:
            # Drop the value
            return True
        elif newvalue != full_value or (newop not in [None, op]):
            if newop not in [None, op]:
                # Callback changed the operator
                varset_new = "%s %s" % (in_var, newop)
            else:
                varset_new = varset_start

            # Work out the continuation-line indent string.
            if isinstance(indent, int):
                if indent == -1:
                    indentspc = ' ' * (len(varset_new) + 2)
                else:
                    indentspc = ' ' * indent
            else:
                indentspc = indent

            if in_var.endswith('()'):
                # A function definition
                if isinstance(newvalue, list):
                    newlines.append('%s {\n%s%s\n}\n' % (varset_new, indentspc, ('\n%s' % indentspc).join(newvalue)))
                else:
                    if not newvalue.startswith('\n'):
                        newvalue = '\n' + newvalue
                    if not newvalue.endswith('\n'):
                        newvalue = newvalue + '\n'
                    newlines.append('%s {%s}\n' % (varset_new, newvalue))
            else:
                # Normal variable
                if isinstance(newvalue, list):
                    if not newvalue:
                        # Empty list -> empty string
                        newlines.append('%s ""\n' % varset_new)
                    elif minbreak:
                        # First item on first line
                        if len(newvalue) == 1:
                            newlines.append('%s "%s"\n' % (varset_new, newvalue[0]))
                        else:
                            newlines.append('%s "%s \\\n' % (varset_new, newvalue[0]))
                            for item in newvalue[1:]:
                                newlines.append('%s%s \\\n' % (indentspc, item))
                            newlines.append('%s"\n' % indentspc)
                    else:
                        # No item on first line
                        newlines.append('%s " \\\n' % varset_new)
                        for item in newvalue:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    newlines.append('%s "%s"\n' % (varset_new, newvalue))
            return True
        else:
            # Put the old lines back where they were
            newlines.extend(varlines)
            # If newlines was touched by the function, we'll need to return True
            return changed

    checkspc = False

    for line in meta_lines:
        if in_var:
            # Accumulating a multi-line assignment or function body.
            value = line.rstrip()
            varlines.append(line)
            if in_var.endswith('()'):
                full_value += '\n' + value
            else:
                # Drop the trailing character: the line-continuation
                # backslash, or the closing quote on the final line.
                full_value += value[:-1]
            if value.endswith(var_end):
                if in_var.endswith('()'):
                    # Only finish when the braces are balanced again.
                    if full_value.count('{') - full_value.count('}') >= 0:
                        continue
                    full_value = full_value[:-1]
                if handle_var_end():
                    updated = True
                    checkspc = True
                in_var = None
        else:
            skip = False
            for (varname, var_re) in var_res.items():
                res = var_re.match(line)
                if res:
                    isfunc = varname.endswith('()')
                    if isfunc:
                        splitvalue = line.split('{', 1)
                        var_end = '}'
                    else:
                        # Last regex group is the quote character used.
                        var_end = res.groups()[-1]
                        splitvalue = line.split(var_end, 1)
                    varset_start = splitvalue[0].rstrip()
                    value = splitvalue[1].rstrip()
                    if not isfunc and value.endswith('\\'):
                        value = value[:-1]
                    full_value = value
                    varlines = [line]
                    in_var = res.group(1)
                    if isfunc:
                        in_var += '()'
                    if value.endswith(var_end):
                        # Single-line setting: strip the closing quote and
                        # handle it immediately.
                        full_value = full_value[:-1]
                        if handle_var_end():
                            updated = True
                            checkspc = True
                        in_var = None
                    skip = True
                    break
            if not skip:
                if checkspc:
                    checkspc = False
                    if newlines and newlines[-1] == '\n' and line == '\n':
                        # Squash blank line if there are two consecutive blanks after a removal
                        continue
                newlines.append(line)
    return (updated, newlines)
  1186. def edit_metadata_file(meta_file, variables, varfunc):
  1187. """Edit a recipe or config file and modify one or more specified
  1188. variable values set in the file using a specified callback function.
  1189. The file is only written to if the value(s) actually change.
  1190. This is basically the file version of edit_metadata(), see that
  1191. function's description for parameter/usage information.
  1192. Returns True if the file was written to, False otherwise.
  1193. """
  1194. with open(meta_file, 'r') as f:
  1195. (updated, newlines) = edit_metadata(f, variables, varfunc)
  1196. if updated:
  1197. with open(meta_file, 'w') as f:
  1198. f.writelines(newlines)
  1199. return updated
def edit_bblayers_conf(bblayers_conf, add, remove, edit_cb=None):
    """Edit bblayers.conf, adding and/or removing layers

    Parameters:
        bblayers_conf: path to bblayers.conf file to edit
        add: layer path (or list of layer paths) to add; None or empty
            list to add nothing
        remove: layer path (or list of layer paths) to remove; None or
            empty list to remove nothing
        edit_cb: optional callback function that will be called after
            processing adds/removes once per existing entry.
    Returns a tuple:
        notadded: list of layers specified to be added but weren't
            (because they were already in the list)
        notremoved: list of layers that were specified to be removed
            but weren't (because they weren't in the list)
    """
    import fnmatch

    def remove_trailing_sep(pth):
        # Normalise away a single trailing path separator.
        if pth and pth[-1] == os.sep:
            pth = pth[:-1]
        return pth

    approved = bb.utils.approved_variables()
    def canonicalise_path(pth):
        # Strip a trailing separator and expand ~ (only when HOME is an
        # approved variable) so comparisons are consistent.
        pth = remove_trailing_sep(pth)
        if 'HOME' in approved and '~' in pth:
            pth = os.path.expanduser(pth)
        return pth

    def layerlist_param(value):
        # Accept None, a single path, or a list of paths.
        if not value:
            return []
        elif isinstance(value, list):
            return [remove_trailing_sep(x) for x in value]
        else:
            return [remove_trailing_sep(value)]

    addlayers = layerlist_param(add)
    removelayers = layerlist_param(remove)

    # Need to use a list here because we can't set non-local variables from a callback in python 2.x
    bblayercalls = []   # operators seen for BBLAYERS ('=', '+=', ...)
    removed = []        # removelayers entries actually removed
    plusequals = False  # True if the file uses multiple 'BBLAYERS +=' lines
    orig_bblayers = []  # canonicalised layers currently in the file

    def handle_bblayers_firstpass(varname, origvalue, op, newlines):
        # First pass: record operators and collect the existing layer list
        # without modifying anything.
        bblayercalls.append(op)
        if op == '=':
            # A plain assignment resets anything accumulated so far.
            del orig_bblayers[:]
        orig_bblayers.extend([canonicalise_path(x) for x in origvalue.split()])
        return (origvalue, None, 2, False)

    def handle_bblayers(varname, origvalue, op, newlines):
        # Second pass: apply removals, additions and the edit callback.
        updated = False
        bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
        if removelayers:
            for removelayer in removelayers:
                for layer in bblayers:
                    # fnmatch allows wildcard removal specs.
                    if fnmatch.fnmatch(canonicalise_path(layer), canonicalise_path(removelayer)):
                        updated = True
                        bblayers.remove(layer)
                        removed.append(removelayer)
                        break
        if addlayers and not plusequals:
            for addlayer in addlayers:
                if addlayer not in bblayers:
                    updated = True
                    bblayers.append(addlayer)
            # Consumed: prevents re-adding on later BBLAYERS statements.
            del addlayers[:]

        if edit_cb:
            newlist = []
            for layer in bblayers:
                res = edit_cb(layer, canonicalise_path(layer))
                if res != layer:
                    newlist.append(res)
                    updated = True
                else:
                    newlist.append(layer)
            bblayers = newlist

        if updated:
            if op == '+=' and not bblayers:
                # Emptied '+=' statement: drop it entirely.
                bblayers = None
            return (bblayers, None, 2, False)
        else:
            return (origvalue, None, 2, False)

    with open(bblayers_conf, 'r') as f:
        (_, newlines) = edit_metadata(f, ['BBLAYERS'], handle_bblayers_firstpass)

    if not bblayercalls:
        raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)

    # Try to do the "smart" thing depending on how the user has laid out
    # their bblayers.conf file
    if bblayercalls.count('+=') > 1:
        plusequals = True

    # Filter out additions that are already present (and not being removed).
    removelayers_canon = [canonicalise_path(layer) for layer in removelayers]
    notadded = []
    for layer in addlayers:
        layer_canon = canonicalise_path(layer)
        if layer_canon in orig_bblayers and not layer_canon in removelayers_canon:
            notadded.append(layer)
    notadded_canon = [canonicalise_path(layer) for layer in notadded]
    addlayers[:] = [layer for layer in addlayers if canonicalise_path(layer) not in notadded_canon]

    (updated, newlines) = edit_metadata(newlines, ['BBLAYERS'], handle_bblayers)

    if addlayers:
        # Still need to add these
        for addlayer in addlayers:
            newlines.append('BBLAYERS += "%s"\n' % addlayer)
        updated = True

    if updated:
        with open(bblayers_conf, 'w') as f:
            f.writelines(newlines)

    notremoved = list(set(removelayers) - set(removed))

    return (notadded, notremoved)
  1307. def get_file_layer(filename, d):
  1308. """Determine the collection (as defined by a layer's layer.conf file) containing the specified file"""
  1309. collections = (d.getVar('BBFILE_COLLECTIONS') or '').split()
  1310. collection_res = {}
  1311. for collection in collections:
  1312. collection_res[collection] = d.getVar('BBFILE_PATTERN_%s' % collection) or ''
  1313. def path_to_layer(path):
  1314. # Use longest path so we handle nested layers
  1315. matchlen = 0
  1316. match = None
  1317. for collection, regex in collection_res.items():
  1318. if len(regex) > matchlen and re.match(regex, path):
  1319. matchlen = len(regex)
  1320. match = collection
  1321. return match
  1322. result = None
  1323. bbfiles = (d.getVar('BBFILES') or '').split()
  1324. bbfilesmatch = False
  1325. for bbfilesentry in bbfiles:
  1326. if fnmatch.fnmatch(filename, bbfilesentry):
  1327. bbfilesmatch = True
  1328. result = path_to_layer(bbfilesentry)
  1329. if not bbfilesmatch:
  1330. # Probably a bbclass
  1331. result = path_to_layer(filename)
  1332. return result
# Constant taken from http://linux.die.net/include/linux/prctl.h
# prctl() option: request that a signal be delivered to this process when
# its parent dies.
PR_SET_PDEATHSIG = 1
  1335. class PrCtlError(Exception):
  1336. pass
  1337. def signal_on_parent_exit(signame):
  1338. """
  1339. Trigger signame to be sent when the parent process dies
  1340. """
  1341. signum = getattr(signal, signame)
  1342. # http://linux.die.net/man/2/prctl
  1343. result = cdll['libc.so.6'].prctl(PR_SET_PDEATHSIG, signum)
  1344. if result != 0:
  1345. raise PrCtlError('prctl failed with error code %s' % result)
#
# Manually call the ioprio syscall. We could depend on other libs like psutil
# however this gets us enough of what we need to bitbake for now without the
# dependency
#
_unamearch = os.uname()[4]  # machine field from uname(), e.g. "x86_64"
IOPRIO_WHO_PROCESS = 1      # ioprio_set 'which' value: target is a process id
IOPRIO_CLASS_SHIFT = 13     # scheduling class occupies the top bits of the ioprio value
  1354. def ioprio_set(who, cls, value):
  1355. NR_ioprio_set = None
  1356. if _unamearch == "x86_64":
  1357. NR_ioprio_set = 251
  1358. elif _unamearch[0] == "i" and _unamearch[2:3] == "86":
  1359. NR_ioprio_set = 289
  1360. elif _unamearch == "aarch64":
  1361. NR_ioprio_set = 30
  1362. if NR_ioprio_set:
  1363. ioprio = value | (cls << IOPRIO_CLASS_SHIFT)
  1364. rc = cdll['libc.so.6'].syscall(NR_ioprio_set, IOPRIO_WHO_PROCESS, who, ioprio)
  1365. if rc != 0:
  1366. raise ValueError("Unable to set ioprio, syscall returned %s" % rc)
  1367. else:
  1368. bb.warn("Unable to set IO Prio for arch %s" % _unamearch)
  1369. def set_process_name(name):
  1370. from ctypes import cdll, byref, create_string_buffer
  1371. # This is nice to have for debugging, not essential
  1372. try:
  1373. libc = cdll.LoadLibrary('libc.so.6')
  1374. buf = create_string_buffer(bytes(name, 'utf-8'))
  1375. libc.prctl(15, byref(buf), 0, 0, 0)
  1376. except:
  1377. pass
  1378. # export common proxies variables from datastore to environment
  1379. def export_proxies(d):
  1380. import os
  1381. variables = ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY',
  1382. 'ftp_proxy', 'FTP_PROXY', 'no_proxy', 'NO_PROXY',
  1383. 'GIT_PROXY_COMMAND']
  1384. exported = False
  1385. for v in variables:
  1386. if v in os.environ.keys():
  1387. exported = True
  1388. else:
  1389. v_proxy = d.getVar(v)
  1390. if v_proxy is not None:
  1391. os.environ[v] = v_proxy
  1392. exported = True
  1393. return exported
  1394. def load_plugins(logger, plugins, pluginpath):
  1395. def load_plugin(name):
  1396. logger.debug(1, 'Loading plugin %s' % name)
  1397. spec = importlib.machinery.PathFinder.find_spec(name, path=[pluginpath] )
  1398. if spec:
  1399. return spec.loader.load_module()
  1400. logger.debug(1, 'Loading plugins from %s...' % pluginpath)
  1401. expanded = (glob.glob(os.path.join(pluginpath, '*' + ext))
  1402. for ext in python_extensions)
  1403. files = itertools.chain.from_iterable(expanded)
  1404. names = set(os.path.splitext(os.path.basename(fn))[0] for fn in files)
  1405. for name in names:
  1406. if name != '__init__':
  1407. plugin = load_plugin(name)
  1408. if hasattr(plugin, 'plugin_init'):
  1409. obj = plugin.plugin_init(plugins)
  1410. plugins.append(obj or plugin)
  1411. else:
  1412. plugins.append(plugin)
  1413. class LogCatcher(logging.Handler):
  1414. """Logging handler for collecting logged messages so you can check them later"""
  1415. def __init__(self):
  1416. self.messages = []
  1417. logging.Handler.__init__(self, logging.WARNING)
  1418. def emit(self, record):
  1419. self.messages.append(bb.build.logformatter.format(record))
  1420. def contains(self, message):
  1421. return (message in self.messages)