#
# oe/utils.py
#
  1. #
  2. # Copyright OpenEmbedded Contributors
  3. #
  4. # SPDX-License-Identifier: GPL-2.0-only
  5. #
  6. import subprocess
  7. import traceback
  8. import errno
  9. from bb import multiprocessing
  10. def read_file(filename):
  11. try:
  12. f = open( filename, "r" )
  13. except IOError as reason:
  14. return "" # WARNING: can't raise an error now because of the new RDEPENDS handling. This is a bit ugly. :M:
  15. else:
  16. data = f.read().strip()
  17. f.close()
  18. return data
  19. return None
  20. def ifelse(condition, iftrue = True, iffalse = False):
  21. if condition:
  22. return iftrue
  23. else:
  24. return iffalse
  25. def conditional(variable, checkvalue, truevalue, falsevalue, d):
  26. if d.getVar(variable) == checkvalue:
  27. return truevalue
  28. else:
  29. return falsevalue
  30. def vartrue(var, iftrue, iffalse, d):
  31. import oe.types
  32. if oe.types.boolean(d.getVar(var)):
  33. return iftrue
  34. else:
  35. return iffalse
  36. def less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
  37. if float(d.getVar(variable)) <= float(checkvalue):
  38. return truevalue
  39. else:
  40. return falsevalue
  41. def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
  42. result = bb.utils.vercmp_string(d.getVar(variable), checkvalue)
  43. if result <= 0:
  44. return truevalue
  45. else:
  46. return falsevalue
  47. def both_contain(variable1, variable2, checkvalue, d):
  48. val1 = d.getVar(variable1)
  49. val2 = d.getVar(variable2)
  50. val1 = set(val1.split())
  51. val2 = set(val2.split())
  52. if isinstance(checkvalue, str):
  53. checkvalue = set(checkvalue.split())
  54. else:
  55. checkvalue = set(checkvalue)
  56. if checkvalue.issubset(val1) and checkvalue.issubset(val2):
  57. return " ".join(checkvalue)
  58. else:
  59. return ""
  60. def set_intersect(variable1, variable2, d):
  61. """
  62. Expand both variables, interpret them as lists of strings, and return the
  63. intersection as a flattened string.
  64. For example:
  65. s1 = "a b c"
  66. s2 = "b c d"
  67. s3 = set_intersect(s1, s2)
  68. => s3 = "b c"
  69. """
  70. val1 = set(d.getVar(variable1).split())
  71. val2 = set(d.getVar(variable2).split())
  72. return " ".join(val1 & val2)
  73. def prune_suffix(var, suffixes, d):
  74. # See if var ends with any of the suffixes listed and
  75. # remove it if found
  76. for suffix in suffixes:
  77. if suffix and var.endswith(suffix):
  78. var = var[:-len(suffix)]
  79. prefix = d.getVar("MLPREFIX")
  80. if prefix and var.startswith(prefix):
  81. var = var[len(prefix):]
  82. return var
  83. def str_filter(f, str, d):
  84. from re import match
  85. return " ".join([x for x in str.split() if match(f, x, 0)])
  86. def str_filter_out(f, str, d):
  87. from re import match
  88. return " ".join([x for x in str.split() if not match(f, x, 0)])
  89. def build_depends_string(depends, task):
  90. """Append a taskname to a string of dependencies as used by the [depends] flag"""
  91. return " ".join(dep + ":" + task for dep in depends.split())
  92. def inherits(d, *classes):
  93. """Return True if the metadata inherits any of the specified classes"""
  94. return any(bb.data.inherits_class(cls, d) for cls in classes)
  95. def features_backfill(var,d):
  96. # This construct allows the addition of new features to variable specified
  97. # as var
  98. # Example for var = "DISTRO_FEATURES"
  99. # This construct allows the addition of new features to DISTRO_FEATURES
  100. # that if not present would disable existing functionality, without
  101. # disturbing distributions that have already set DISTRO_FEATURES.
  102. # Distributions wanting to elide a value in DISTRO_FEATURES_BACKFILL should
  103. # add the feature to DISTRO_FEATURES_BACKFILL_CONSIDERED
  104. features = (d.getVar(var) or "").split()
  105. backfill = (d.getVar(var+"_BACKFILL") or "").split()
  106. considered = (d.getVar(var+"_BACKFILL_CONSIDERED") or "").split()
  107. addfeatures = []
  108. for feature in backfill:
  109. if feature not in features and feature not in considered:
  110. addfeatures.append(feature)
  111. if addfeatures:
  112. d.appendVar(var, " " + " ".join(addfeatures))
  113. def all_distro_features(d, features, truevalue="1", falsevalue=""):
  114. """
  115. Returns truevalue if *all* given features are set in DISTRO_FEATURES,
  116. else falsevalue. The features can be given as single string or anything
  117. that can be turned into a set.
  118. This is a shorter, more flexible version of
  119. bb.utils.contains("DISTRO_FEATURES", features, truevalue, falsevalue, d).
  120. Without explicit true/false values it can be used directly where
  121. Python expects a boolean:
  122. if oe.utils.all_distro_features(d, "foo bar"):
  123. bb.fatal("foo and bar are mutually exclusive DISTRO_FEATURES")
  124. With just a truevalue, it can be used to include files that are meant to be
  125. used only when requested via DISTRO_FEATURES:
  126. require ${@ oe.utils.all_distro_features(d, "foo bar", "foo-and-bar.inc")
  127. """
  128. return bb.utils.contains("DISTRO_FEATURES", features, truevalue, falsevalue, d)
  129. def any_distro_features(d, features, truevalue="1", falsevalue=""):
  130. """
  131. Returns truevalue if at least *one* of the given features is set in DISTRO_FEATURES,
  132. else falsevalue. The features can be given as single string or anything
  133. that can be turned into a set.
  134. This is a shorter, more flexible version of
  135. bb.utils.contains_any("DISTRO_FEATURES", features, truevalue, falsevalue, d).
  136. Without explicit true/false values it can be used directly where
  137. Python expects a boolean:
  138. if not oe.utils.any_distro_features(d, "foo bar"):
  139. bb.fatal("foo, bar or both must be set in DISTRO_FEATURES")
  140. With just a truevalue, it can be used to include files that are meant to be
  141. used only when requested via DISTRO_FEATURES:
  142. require ${@ oe.utils.any_distro_features(d, "foo bar", "foo-or-bar.inc")
  143. """
  144. return bb.utils.contains_any("DISTRO_FEATURES", features, truevalue, falsevalue, d)
  145. def parallel_make(d, makeinst=False):
  146. """
  147. Return the integer value for the number of parallel threads to use when
  148. building, scraped out of PARALLEL_MAKE. If no parallelization option is
  149. found, returns None
  150. e.g. if PARALLEL_MAKE = "-j 10", this will return 10 as an integer.
  151. """
  152. if makeinst:
  153. pm = (d.getVar('PARALLEL_MAKEINST') or '').split()
  154. else:
  155. pm = (d.getVar('PARALLEL_MAKE') or '').split()
  156. # look for '-j' and throw other options (e.g. '-l') away
  157. while pm:
  158. opt = pm.pop(0)
  159. if opt == '-j':
  160. v = pm.pop(0)
  161. elif opt.startswith('-j'):
  162. v = opt[2:].strip()
  163. else:
  164. continue
  165. return int(v)
  166. return ''
  167. def parallel_make_argument(d, fmt, limit=None, makeinst=False):
  168. """
  169. Helper utility to construct a parallel make argument from the number of
  170. parallel threads specified in PARALLEL_MAKE.
  171. Returns the input format string `fmt` where a single '%d' will be expanded
  172. with the number of parallel threads to use. If `limit` is specified, the
  173. number of parallel threads will be no larger than it. If no parallelization
  174. option is found in PARALLEL_MAKE, returns an empty string
  175. e.g. if PARALLEL_MAKE = "-j 10", parallel_make_argument(d, "-n %d") will return
  176. "-n 10"
  177. """
  178. v = parallel_make(d, makeinst)
  179. if v:
  180. if limit:
  181. v = min(limit, v)
  182. return fmt % v
  183. return ''
  184. def packages_filter_out_system(d):
  185. """
  186. Return a list of packages from PACKAGES with the "system" packages such as
  187. PN-dbg PN-doc PN-locale-eb-gb removed.
  188. """
  189. pn = d.getVar('PN')
  190. pkgfilter = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')]
  191. localepkg = pn + "-locale-"
  192. pkgs = []
  193. for pkg in d.getVar('PACKAGES').split():
  194. if pkg not in pkgfilter and localepkg not in pkg:
  195. pkgs.append(pkg)
  196. return pkgs
def getstatusoutput(cmd):
    # Thin wrapper around the stdlib, presumably retained so existing callers
    # of oe.utils.getstatusoutput() keep working; returns (exitstatus, output).
    return subprocess.getstatusoutput(cmd)
  199. def trim_version(version, num_parts=2):
  200. """
  201. Return just the first <num_parts> of <version>, split by periods. For
  202. example, trim_version("1.2.3", 2) will return "1.2".
  203. """
  204. if type(version) is not str:
  205. raise TypeError("Version should be a string")
  206. if num_parts < 1:
  207. raise ValueError("Cannot split to parts < 1")
  208. parts = version.split(".")
  209. trimmed = ".".join(parts[:num_parts])
  210. return trimmed
  211. def cpu_count(at_least=1, at_most=64):
  212. cpus = len(os.sched_getaffinity(0))
  213. return max(min(cpus, at_most), at_least)
  214. def execute_pre_post_process(d, cmds):
  215. if cmds is None:
  216. return
  217. cmds = cmds.replace(";", " ")
  218. for cmd in cmds.split():
  219. bb.note("Executing %s ..." % cmd)
  220. bb.build.exec_func(cmd, d)
  221. def get_bb_number_threads(d):
  222. return int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)
  223. def multiprocess_launch(target, items, d, extraargs=None):
  224. max_process = get_bb_number_threads(d)
  225. return multiprocess_launch_mp(target, items, max_process, extraargs)
# For each item in items, call the function 'target' with item as the first
# argument, extraargs as the other arguments and handle any exceptions in the
# parent thread
def multiprocess_launch_mp(target, items, max_process, extraargs=None):
    """
    Run target(item, *extraargs) for each entry in items using up to
    max_process concurrent processes and return the list of truthy results.

    Child exceptions are shipped back (with their traceback text) over a
    pipe, collected, and reported in the parent via bb.fatal() after all
    launched processes have finished.
    """
    class ProcessLaunch(multiprocessing.Process):
        # Process subclass that sends either (None, result) or
        # (exception, traceback-string) back to the parent over a Pipe.
        def __init__(self, *args, **kwargs):
            multiprocessing.Process.__init__(self, *args, **kwargs)
            self._pconn, self._cconn = multiprocessing.Pipe()
            self._exception = None
            self._result = None
        def run(self):
            try:
                ret = self._target(*self._args, **self._kwargs)
                self._cconn.send((None, ret))
            except Exception as e:
                tb = traceback.format_exc()
                self._cconn.send((e, tb))
        def update(self):
            # Drain the parent end of the pipe: the first tuple element is
            # the exception (or None); the second is the traceback string on
            # failure or the result on success.
            if self._pconn.poll():
                (e, tb) = self._pconn.recv()
                if e is not None:
                    self._exception = (e, tb)
                else:
                    self._result = tb
        @property
        def exception(self):
            self.update()
            return self._exception
        @property
        def result(self):
            self.update()
            return self._result
    launched = []
    errors = []
    results = []
    items = list(items)
    # Keep launching while work remains (and no errors have occurred yet),
    # and keep polling while any child is still running. On the first error
    # no new children are started, but running ones are drained normally.
    while (items and not errors) or launched:
        if not errors and items and len(launched) < max_process:
            args = items.pop()
            # Non-tuple items are wrapped so they become the single argument.
            if not type(args) is tuple:
                args = (args,)
            if extraargs is not None:
                args = args + extraargs
            p = ProcessLaunch(target=target, args=args)
            p.start()
            launched.append(p)
        for q in launched:
            # Have to manually call update() to avoid deadlocks. The pipe can be full and
            # transfer stalled until we try and read the results object but the subprocess won't exit
            # as it still has data to write (https://bugs.python.org/issue8426)
            q.update()
            # The finished processes are joined when calling is_alive()
            if not q.is_alive():
                if q.exception:
                    errors.append(q.exception)
                if q.result:
                    results.append(q.result)
                launched.remove(q)
    # Paranoia doesn't hurt
    for p in launched:
        p.join()
    if errors:
        # Build one combined report; CalledProcessError gets its captured
        # output included instead of the (less useful) traceback text.
        msg = ""
        for (e, tb) in errors:
            if isinstance(e, subprocess.CalledProcessError) and e.output:
                msg = msg + str(e) + "\n"
                msg = msg + "Subprocess output:"
                msg = msg + e.output.decode("utf-8", errors="ignore")
            else:
                msg = msg + str(e) + ": " + str(tb) + "\n"
        bb.fatal("Fatal errors occurred in subprocesses:\n%s" % msg)
    return results
  298. def squashspaces(string):
  299. import re
  300. return re.sub(r"\s+", " ", string).strip()
  301. def rprovides_map(pkgdata_dir, pkg_dict):
  302. # Map file -> pkg provider
  303. rprov_map = {}
  304. for pkg in pkg_dict:
  305. path_to_pkgfile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
  306. if not os.path.isfile(path_to_pkgfile):
  307. continue
  308. with open(path_to_pkgfile) as f:
  309. for line in f:
  310. if line.startswith('RPROVIDES') or line.startswith('FILERPROVIDES'):
  311. # List all components provided by pkg.
  312. # Exclude version strings, i.e. those starting with (
  313. provides = [x for x in line.split()[1:] if not x.startswith('(')]
  314. for prov in provides:
  315. if prov in rprov_map:
  316. rprov_map[prov].append(pkg)
  317. else:
  318. rprov_map[prov] = [pkg]
  319. return rprov_map
  320. def format_pkg_list(pkg_dict, ret_format=None, pkgdata_dir=None):
  321. output = []
  322. if ret_format == "arch":
  323. for pkg in sorted(pkg_dict):
  324. output.append("%s %s" % (pkg, pkg_dict[pkg]["arch"]))
  325. elif ret_format == "file":
  326. for pkg in sorted(pkg_dict):
  327. output.append("%s %s %s" % (pkg, pkg_dict[pkg]["filename"], pkg_dict[pkg]["arch"]))
  328. elif ret_format == "ver":
  329. for pkg in sorted(pkg_dict):
  330. output.append("%s %s %s" % (pkg, pkg_dict[pkg]["arch"], pkg_dict[pkg]["ver"]))
  331. elif ret_format == "deps":
  332. rprov_map = rprovides_map(pkgdata_dir, pkg_dict)
  333. for pkg in sorted(pkg_dict):
  334. for dep in pkg_dict[pkg]["deps"]:
  335. if dep in rprov_map:
  336. # There could be multiple providers within the image
  337. for pkg_provider in rprov_map[dep]:
  338. output.append("%s|%s * %s [RPROVIDES]" % (pkg, pkg_provider, dep))
  339. else:
  340. output.append("%s|%s" % (pkg, dep))
  341. else:
  342. for pkg in sorted(pkg_dict):
  343. output.append(pkg)
  344. output_str = '\n'.join(output)
  345. if output_str:
  346. # make sure last line is newline terminated
  347. output_str += '\n'
  348. return output_str
  349. # Helper function to get the host compiler version
  350. # Do not assume the compiler is gcc
  351. def get_host_compiler_version(d, taskcontextonly=False):
  352. import re, subprocess
  353. if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
  354. return
  355. compiler = d.getVar("BUILD_CC")
  356. # Get rid of ccache since it is not present when parsing.
  357. if compiler.startswith('ccache '):
  358. compiler = compiler[7:]
  359. try:
  360. env = os.environ.copy()
  361. # datastore PATH does not contain session PATH as set by environment-setup-...
  362. # this breaks the install-buildtools use-case
  363. # env["PATH"] = d.getVar("PATH")
  364. output = subprocess.check_output("%s --version" % compiler, \
  365. shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
  366. except subprocess.CalledProcessError as e:
  367. bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))
  368. match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
  369. if not match:
  370. bb.fatal("Can't get compiler version from %s --version output" % compiler)
  371. version = match.group(1)
  372. return compiler, version
  373. def host_gcc_version(d, taskcontextonly=False):
  374. import re, subprocess
  375. if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
  376. return
  377. compiler = d.getVar("BUILD_CC")
  378. # Get rid of ccache since it is not present when parsing.
  379. if compiler.startswith('ccache '):
  380. compiler = compiler[7:]
  381. try:
  382. env = os.environ.copy()
  383. env["PATH"] = d.getVar("PATH")
  384. output = subprocess.check_output("%s --version" % compiler, \
  385. shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
  386. except subprocess.CalledProcessError as e:
  387. bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))
  388. match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
  389. if not match:
  390. bb.fatal("Can't get compiler version from %s --version output" % compiler)
  391. version = match.group(1)
  392. return "-%s" % version if version in ("4.8", "4.9") else ""
  393. def get_multilib_datastore(variant, d):
  394. localdata = bb.data.createCopy(d)
  395. if variant:
  396. overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + variant
  397. localdata.setVar("OVERRIDES", overrides)
  398. localdata.setVar("MLPREFIX", variant + "-")
  399. else:
  400. origdefault = localdata.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL")
  401. if origdefault:
  402. localdata.setVar("DEFAULTTUNE", origdefault)
  403. overrides = localdata.getVar("OVERRIDES", False).split(":")
  404. overrides = ":".join([x for x in overrides if not x.startswith("virtclass-multilib-")])
  405. localdata.setVar("OVERRIDES", overrides)
  406. localdata.setVar("MLPREFIX", "")
  407. return localdata
  408. def sh_quote(string):
  409. import shlex
  410. return shlex.quote(string)
  411. def directory_size(root, blocksize=4096):
  412. """
  413. Calculate the size of the directory, taking into account hard links,
  414. rounding up every size to multiples of the blocksize.
  415. """
  416. def roundup(size):
  417. """
  418. Round the size up to the nearest multiple of the block size.
  419. """
  420. import math
  421. return math.ceil(size / blocksize) * blocksize
  422. def getsize(filename):
  423. """
  424. Get the size of the filename, not following symlinks, taking into
  425. account hard links.
  426. """
  427. stat = os.lstat(filename)
  428. if stat.st_ino not in inodes:
  429. inodes.add(stat.st_ino)
  430. return stat.st_size
  431. else:
  432. return 0
  433. inodes = set()
  434. total = 0
  435. for root, dirs, files in os.walk(root):
  436. total += sum(roundup(getsize(os.path.join(root, name))) for name in files)
  437. total += roundup(getsize(root))
  438. return total
  439. # Update the mtime of a file, skip if permission/read-only issues
  440. def touch(filename):
  441. try:
  442. os.utime(filename, None)
  443. except PermissionError:
  444. pass
  445. except OSError as e:
  446. # Handle read-only file systems gracefully
  447. if e.errno != errno.EROFS:
  448. raise e