#
# Copyright OpenEmbedded Contributors
#
# SPDX-License-Identifier: GPL-2.0-only
#

import subprocess
import multiprocessing
import traceback
import os

# Note: the 'bb' module is not imported here; BitBake is expected to make it
# available to metadata Python libraries such as this one at runtime.

def read_file(filename):
    try:
        f = open(filename, "r")
    except IOError as reason:
        return "" # WARNING: can't raise an error now because of the new RDEPENDS handling. This is a bit ugly. :M:
    else:
        data = f.read().strip()
        f.close()
        return data

def ifelse(condition, iftrue = True, iffalse = False):
    if condition:
        return iftrue
    else:
        return iffalse

def conditional(variable, checkvalue, truevalue, falsevalue, d):
    if d.getVar(variable) == checkvalue:
        return truevalue
    else:
        return falsevalue

def vartrue(var, iftrue, iffalse, d):
    import oe.types
    if oe.types.boolean(d.getVar(var)):
        return iftrue
    else:
        return iffalse

def less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    if float(d.getVar(variable)) <= float(checkvalue):
        return truevalue
    else:
        return falsevalue

def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    result = bb.utils.vercmp_string(d.getVar(variable), checkvalue)
    if result <= 0:
        return truevalue
    else:
        return falsevalue
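
# Illustrative usage: these helpers are normally called from inline Python
# expansion in recipe metadata; variable and flag names below are hypothetical.
#   EXTRA_OECONF += "${@oe.utils.conditional('SITEINFO_ENDIANNESS', 'le', '--little-endian', '--big-endian', d)}"
#   SOMEFLAG = "${@oe.utils.version_less_or_equal('PV', '2.0', '-DLEGACY', '', d)}"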

def both_contain(variable1, variable2, checkvalue, d):
    val1 = d.getVar(variable1)
    val2 = d.getVar(variable2)
    val1 = set(val1.split())
    val2 = set(val2.split())
    if isinstance(checkvalue, str):
        checkvalue = set(checkvalue.split())
    else:
        checkvalue = set(checkvalue)
    if checkvalue.issubset(val1) and checkvalue.issubset(val2):
        return " ".join(checkvalue)
    else:
        return ""
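
# Example (illustrative values): with DISTRO_FEATURES = "x11 opengl wayland"
# and MACHINE_FEATURES = "opengl alsa", both_contain("DISTRO_FEATURES",
# "MACHINE_FEATURES", "opengl", d) returns "opengl"; asking for "x11" returns
# "" because only one of the two variables contains it.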

def set_intersect(variable1, variable2, d):
    """
    Expand both variables, interpret them as lists of strings, and return the
    intersection as a flattened string.

    For example:
    s1 = "a b c"
    s2 = "b c d"
    s3 = set_intersect(s1, s2)
    => s3 = "b c"
    """
    val1 = set(d.getVar(variable1).split())
    val2 = set(d.getVar(variable2).split())
    return " ".join(val1 & val2)

def prune_suffix(var, suffixes, d):
    # See if var ends with any of the suffixes listed and
    # remove it if found
    for suffix in suffixes:
        if suffix and var.endswith(suffix):
            var = var[:-len(suffix)]

    prefix = d.getVar("MLPREFIX")
    if prefix and var.startswith(prefix):
        var = var[len(prefix):]

    return var
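
# Example (illustrative): with MLPREFIX = "lib32-",
# prune_suffix("lib32-gtk+-native", ["-native", "-cross"], d) returns "gtk+":
# the matching suffix is stripped first, then the multilib prefix.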

def str_filter(f, str, d):
    from re import match
    return " ".join([x for x in str.split() if match(f, x, 0)])

def str_filter_out(f, str, d):
    from re import match
    return " ".join([x for x in str.split() if not match(f, x, 0)])

def build_depends_string(depends, task):
    """Append a taskname to a string of dependencies as used by the [depends] flag"""
    return " ".join(dep + ":" + task for dep in depends.split())
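
# Example: build_depends_string("virtual/kernel zlib", "do_populate_sysroot")
# returns "virtual/kernel:do_populate_sysroot zlib:do_populate_sysroot",
# suitable as a [depends] varflag value.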

def inherits(d, *classes):
    """Return True if the metadata inherits any of the specified classes"""
    return any(bb.data.inherits_class(cls, d) for cls in classes)

def features_backfill(var, d):
    # This construct allows the addition of new features to the variable named
    # by var.
    # Example for var = "DISTRO_FEATURES": new features can be added to
    # DISTRO_FEATURES that, if not present, would disable existing
    # functionality, without disturbing distributions that have already set
    # DISTRO_FEATURES.
    # Distributions wanting to elide a value in DISTRO_FEATURES_BACKFILL should
    # add the feature to DISTRO_FEATURES_BACKFILL_CONSIDERED.
    features = (d.getVar(var) or "").split()
    backfill = (d.getVar(var + "_BACKFILL") or "").split()
    considered = (d.getVar(var + "_BACKFILL_CONSIDERED") or "").split()

    addfeatures = []
    for feature in backfill:
        if feature not in features and feature not in considered:
            addfeatures.append(feature)

    if addfeatures:
        d.appendVar(var, " " + " ".join(addfeatures))
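
# Example (illustrative values): with DISTRO_FEATURES = "alsa ipv4",
# DISTRO_FEATURES_BACKFILL = "pulseaudio sysvinit" and
# DISTRO_FEATURES_BACKFILL_CONSIDERED = "sysvinit", calling
# features_backfill("DISTRO_FEATURES", d) appends only " pulseaudio";
# "sysvinit" is skipped because the distro explicitly considered it.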

def all_distro_features(d, features, truevalue="1", falsevalue=""):
    """
    Returns truevalue if *all* given features are set in DISTRO_FEATURES,
    else falsevalue. The features can be given as single string or anything
    that can be turned into a set.

    This is a shorter, more flexible version of
    bb.utils.contains("DISTRO_FEATURES", features, truevalue, falsevalue, d).

    Without explicit true/false values it can be used directly where
    Python expects a boolean:
       if oe.utils.all_distro_features(d, "foo bar"):
           bb.fatal("foo and bar are mutually exclusive DISTRO_FEATURES")

    With just a truevalue, it can be used to include files that are meant to be
    used only when requested via DISTRO_FEATURES:
       require ${@ oe.utils.all_distro_features(d, "foo bar", "foo-and-bar.inc")}
    """
    return bb.utils.contains("DISTRO_FEATURES", features, truevalue, falsevalue, d)

def any_distro_features(d, features, truevalue="1", falsevalue=""):
    """
    Returns truevalue if at least *one* of the given features is set in DISTRO_FEATURES,
    else falsevalue. The features can be given as single string or anything
    that can be turned into a set.

    This is a shorter, more flexible version of
    bb.utils.contains_any("DISTRO_FEATURES", features, truevalue, falsevalue, d).

    Without explicit true/false values it can be used directly where
    Python expects a boolean:
       if not oe.utils.any_distro_features(d, "foo bar"):
           bb.fatal("foo, bar or both must be set in DISTRO_FEATURES")

    With just a truevalue, it can be used to include files that are meant to be
    used only when requested via DISTRO_FEATURES:
       require ${@ oe.utils.any_distro_features(d, "foo bar", "foo-or-bar.inc")}
    """
    return bb.utils.contains_any("DISTRO_FEATURES", features, truevalue, falsevalue, d)

def parallel_make(d, makeinst=False):
    """
    Return the integer value for the number of parallel threads to use when
    building, scraped out of PARALLEL_MAKE. If no parallelization option is
    found, returns an empty string.

    e.g. if PARALLEL_MAKE = "-j 10", this will return 10 as an integer.
    """
    if makeinst:
        pm = (d.getVar('PARALLEL_MAKEINST') or '').split()
    else:
        pm = (d.getVar('PARALLEL_MAKE') or '').split()
    # look for '-j' and throw other options (e.g. '-l') away
    while pm:
        opt = pm.pop(0)
        if opt == '-j':
            v = pm.pop(0)
        elif opt.startswith('-j'):
            v = opt[2:].strip()
        else:
            continue

        return int(v)

    return ''
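
# Example: with PARALLEL_MAKE = "-j 10 -l 8", parallel_make(d) returns the
# integer 10 (the "-l" load-limit option is ignored); if no "-j" option is
# present, the empty string is returned.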

def parallel_make_argument(d, fmt, limit=None, makeinst=False):
    """
    Helper utility to construct a parallel make argument from the number of
    parallel threads specified in PARALLEL_MAKE.

    Returns the input format string `fmt` where a single '%d' will be expanded
    with the number of parallel threads to use. If `limit` is specified, the
    number of parallel threads will be no larger than it. If no parallelization
    option is found in PARALLEL_MAKE, returns an empty string.

    e.g. if PARALLEL_MAKE = "-j 10", parallel_make_argument(d, "-n %d") will return
    "-n 10"
    """
    v = parallel_make(d, makeinst)
    if v:
        if limit:
            v = min(limit, v)
        return fmt % v
    return ''

def packages_filter_out_system(d):
    """
    Return a list of packages from PACKAGES with the "system" packages such as
    PN-dbg PN-doc PN-locale-en-gb removed.
    """
    pn = d.getVar('PN')
    pkgfilter = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev', '-src')]
    localepkg = pn + "-locale-"
    pkgs = []

    for pkg in d.getVar('PACKAGES').split():
        if pkg not in pkgfilter and localepkg not in pkg:
            pkgs.append(pkg)
    return pkgs
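
# Example (illustrative): with PN = "foo" and
# PACKAGES = "foo foo-dbg foo-dev foo-extra foo-locale-en-gb",
# packages_filter_out_system(d) returns ["foo-extra"]; the base package and
# the -dbg, -dev and -locale-* packages are all filtered out.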

def getstatusoutput(cmd):
    return subprocess.getstatusoutput(cmd)

def trim_version(version, num_parts=2):
    """
    Return just the first <num_parts> of <version>, split by periods. For
    example, trim_version("1.2.3", 2) will return "1.2".
    """
    if type(version) is not str:
        raise TypeError("Version should be a string")
    if num_parts < 1:
        raise ValueError("Cannot split to parts < 1")

    parts = version.split(".")
    trimmed = ".".join(parts[:num_parts])
    return trimmed

def cpu_count(at_least=1, at_most=64):
    # Number of CPUs available to this process, clamped to [at_least, at_most]
    cpus = len(os.sched_getaffinity(0))
    return max(min(cpus, at_most), at_least)

def execute_pre_post_process(d, cmds):
    if cmds is None:
        return

    cmds = cmds.replace(";", " ")

    for cmd in cmds.split():
        bb.note("Executing %s ..." % cmd)
        bb.build.exec_func(cmd, d)
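
# Typical usage (as in the rootfs handling code): cmds is a space- or
# semicolon-separated list of function names to run via bb.build.exec_func(),
# e.g.
#   execute_pre_post_process(d, d.getVar("ROOTFS_POSTPROCESS_COMMAND"))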

# For each item in items, call the function 'target' with item as the first
# argument, extraargs as the other arguments and handle any exceptions in the
# parent thread
def multiprocess_launch(target, items, d, extraargs=None):

    class ProcessLaunch(multiprocessing.Process):
        def __init__(self, *args, **kwargs):
            multiprocessing.Process.__init__(self, *args, **kwargs)
            self._pconn, self._cconn = multiprocessing.Pipe()
            self._exception = None
            self._result = None

        def run(self):
            try:
                ret = self._target(*self._args, **self._kwargs)
                self._cconn.send((None, ret))
            except Exception as e:
                tb = traceback.format_exc()
                self._cconn.send((e, tb))

        def update(self):
            if self._pconn.poll():
                (e, tb) = self._pconn.recv()
                if e is not None:
                    self._exception = (e, tb)
                else:
                    self._result = tb

        @property
        def exception(self):
            self.update()
            return self._exception

        @property
        def result(self):
            self.update()
            return self._result

    max_process = int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)
    launched = []
    errors = []
    results = []
    items = list(items)
    while (items and not errors) or launched:
        if not errors and items and len(launched) < max_process:
            args = (items.pop(),)
            if extraargs is not None:
                args = args + extraargs
            p = ProcessLaunch(target=target, args=args)
            p.start()
            launched.append(p)
        for q in launched:
            # Have to manually call update() to avoid deadlocks. The pipe can be full and
            # transfer stalled until we try and read the results object but the subprocess won't exit
            # as it still has data to write (https://bugs.python.org/issue8426)
            q.update()
            # The finished processes are joined when calling is_alive()
            if not q.is_alive():
                if q.exception:
                    errors.append(q.exception)
                if q.result:
                    results.append(q.result)
                launched.remove(q)
    # Paranoia doesn't hurt
    for p in launched:
        p.join()
    if errors:
        msg = ""
        for (e, tb) in errors:
            if isinstance(e, subprocess.CalledProcessError) and e.output:
                msg = msg + str(e) + "\n"
                msg = msg + "Subprocess output:"
                msg = msg + e.output.decode("utf-8", errors="ignore")
            else:
                msg = msg + str(e) + ": " + str(tb) + "\n"
        bb.fatal("Fatal errors occurred in subprocesses:\n%s" % msg)
    return results
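
# Illustrative usage (hypothetical worker and file list):
#
#   def checksum_one(path):
#       return (path, bb.utils.sha256_file(path))
#
#   results = oe.utils.multiprocess_launch(checksum_one, filelist, d)
#
# Up to BB_NUMBER_THREADS items are processed at once; exceptions raised in
# child processes are collected and reported in the parent via bb.fatal(),
# and truthy return values are gathered into the returned list.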

def squashspaces(string):
    import re
    return re.sub(r"\s+", " ", string).strip()

def rprovides_map(pkgdata_dir, pkg_dict):
    # Map file -> pkg provider
    rprov_map = {}

    for pkg in pkg_dict:
        path_to_pkgfile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
        if not os.path.isfile(path_to_pkgfile):
            continue
        with open(path_to_pkgfile) as f:
            for line in f:
                if line.startswith('RPROVIDES') or line.startswith('FILERPROVIDES'):
                    # List all components provided by pkg.
                    # Exclude version strings, i.e. those starting with (
                    provides = [x for x in line.split()[1:] if not x.startswith('(')]
                    for prov in provides:
                        if prov in rprov_map:
                            rprov_map[prov].append(pkg)
                        else:
                            rprov_map[prov] = [pkg]

    return rprov_map
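
# The returned mapping goes from each RPROVIDES/FILERPROVIDES entry to the
# packages providing it, e.g. (illustrative names):
#   {"virtual-foo": ["foo"], "bar-alternative": ["bar", "busybox"]}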

def format_pkg_list(pkg_dict, ret_format=None, pkgdata_dir=None):
    output = []

    if ret_format == "arch":
        for pkg in sorted(pkg_dict):
            output.append("%s %s" % (pkg, pkg_dict[pkg]["arch"]))
    elif ret_format == "file":
        for pkg in sorted(pkg_dict):
            output.append("%s %s %s" % (pkg, pkg_dict[pkg]["filename"], pkg_dict[pkg]["arch"]))
    elif ret_format == "ver":
        for pkg in sorted(pkg_dict):
            output.append("%s %s %s" % (pkg, pkg_dict[pkg]["arch"], pkg_dict[pkg]["ver"]))
    elif ret_format == "deps":
        rprov_map = rprovides_map(pkgdata_dir, pkg_dict)
        for pkg in sorted(pkg_dict):
            for dep in pkg_dict[pkg]["deps"]:
                if dep in rprov_map:
                    # There could be multiple providers within the image
                    for pkg_provider in rprov_map[dep]:
                        output.append("%s|%s * %s [RPROVIDES]" % (pkg, pkg_provider, dep))
                else:
                    output.append("%s|%s" % (pkg, dep))
    else:
        for pkg in sorted(pkg_dict):
            output.append(pkg)

    output_str = '\n'.join(output)

    if output_str:
        # make sure last line is newline terminated
        output_str += '\n'

    return output_str
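
# Example: with ret_format="deps", each output line is either "pkg|dependency"
# for a plain dependency or "pkg|provider * dependency [RPROVIDES]" when the
# dependency is satisfied by a runtime provider found in pkgdata.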

# Helper function to get the host compiler version
# Do not assume the compiler is gcc
def get_host_compiler_version(d, taskcontextonly=False):
    import re, subprocess

    if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
        return

    compiler = d.getVar("BUILD_CC")
    # Get rid of ccache since it is not present when parsing.
    if compiler.startswith('ccache '):
        compiler = compiler[7:]
    try:
        env = os.environ.copy()
        # datastore PATH does not contain session PATH as set by environment-setup-...
        # this breaks the install-buildtools use-case
        # env["PATH"] = d.getVar("PATH")
        output = subprocess.check_output("%s --version" % compiler, \
                    shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
    except subprocess.CalledProcessError as e:
        bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))

    match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
    if not match:
        bb.fatal("Can't get compiler version from %s --version output" % compiler)

    version = match.group(1)
    return compiler, version

def host_gcc_version(d, taskcontextonly=False):
    import re, subprocess

    if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
        return

    compiler = d.getVar("BUILD_CC")
    # Get rid of ccache since it is not present when parsing.
    if compiler.startswith('ccache '):
        compiler = compiler[7:]
    try:
        env = os.environ.copy()
        env["PATH"] = d.getVar("PATH")
        output = subprocess.check_output("%s --version" % compiler, \
                    shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
    except subprocess.CalledProcessError as e:
        bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))

    match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
    if not match:
        bb.fatal("Can't get compiler version from %s --version output" % compiler)

    version = match.group(1)
    return "-%s" % version if version in ("4.8", "4.9") else ""

def get_multilib_datastore(variant, d):
    localdata = bb.data.createCopy(d)
    if variant:
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + variant
        localdata.setVar("OVERRIDES", overrides)
        localdata.setVar("MLPREFIX", variant + "-")
    else:
        origdefault = localdata.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL")
        if origdefault:
            localdata.setVar("DEFAULTTUNE", origdefault)
        overrides = localdata.getVar("OVERRIDES", False).split(":")
        overrides = ":".join([x for x in overrides if not x.startswith("virtclass-multilib-")])
        localdata.setVar("OVERRIDES", overrides)
        localdata.setVar("MLPREFIX", "")
    return localdata
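
# Illustrative usage: localdata = get_multilib_datastore("lib32", d) returns a
# copy of d with MLPREFIX = "lib32-" and the "virtclass-multilib-lib32"
# override appended to OVERRIDES; an empty variant returns a copy with any
# multilib override removed and MLPREFIX cleared.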

class ImageQAFailed(Exception):
    def __init__(self, description, name=None, logfile=None):
        self.description = description
        self.name = name
        self.logfile = logfile

    def __str__(self):
        msg = 'Function failed: %s' % self.name
        if self.description:
            msg = msg + ' (%s)' % self.description

        return msg

def sh_quote(string):
    import shlex
    return shlex.quote(string)

def directory_size(root, blocksize=4096):
    """
    Calculate the size of the directory, taking into account hard links,
    rounding up every size to multiples of the blocksize.
    """
    def roundup(size):
        """
        Round the size up to the nearest multiple of the block size.
        """
        import math
        return math.ceil(size / blocksize) * blocksize

    def getsize(filename):
        """
        Get the size of the filename, not following symlinks, taking into
        account hard links.
        """
        stat = os.lstat(filename)
        if stat.st_ino not in inodes:
            inodes.add(stat.st_ino)
            return stat.st_size
        else:
            return 0

    inodes = set()

    total = 0
    for root, dirs, files in os.walk(root):
        total += sum(roundup(getsize(os.path.join(root, name))) for name in files)
        total += roundup(getsize(root))
    return total
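
# Example (hypothetical path): directory_size("/path/to/rootfs") returns the
# total on-disk footprint in bytes, counting each hard-linked inode only once
# and rounding every file and directory up to the default 4096-byte blocksize.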