# utils.py
# Note: 'bb', 'oe' and the datastore 'd' used throughout this module are
# expected to be provided by the BitBake/OpenEmbedded execution environment.
import subprocess
import multiprocessing
import traceback
import os

def read_file(filename):
    try:
        f = open(filename, "r")
    except IOError as reason:
        return "" # WARNING: can't raise an error now because of the new RDEPENDS handling. This is a bit ugly. :M:
    else:
        data = f.read().strip()
        f.close()
        return data
    return None

def ifelse(condition, iftrue=True, iffalse=False):
    if condition:
        return iftrue
    else:
        return iffalse

def conditional(variable, checkvalue, truevalue, falsevalue, d):
    if d.getVar(variable) == checkvalue:
        return truevalue
    else:
        return falsevalue

def vartrue(var, iftrue, iffalse, d):
    import oe.types
    if oe.types.boolean(d.getVar(var)):
        return iftrue
    else:
        return iffalse

def less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    if float(d.getVar(variable)) <= float(checkvalue):
        return truevalue
    else:
        return falsevalue

def version_less_or_equal(variable, checkvalue, truevalue, falsevalue, d):
    result = bb.utils.vercmp_string(d.getVar(variable), checkvalue)
    if result <= 0:
        return truevalue
    else:
        return falsevalue

def both_contain(variable1, variable2, checkvalue, d):
    val1 = d.getVar(variable1)
    val2 = d.getVar(variable2)
    val1 = set(val1.split())
    val2 = set(val2.split())
    if isinstance(checkvalue, str):
        checkvalue = set(checkvalue.split())
    else:
        checkvalue = set(checkvalue)
    if checkvalue.issubset(val1) and checkvalue.issubset(val2):
        return " ".join(checkvalue)
    else:
        return ""

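# For example, assuming DISTRO_FEATURES = "systemd x11" and
# MACHINE_FEATURES = "systemd wifi" (illustrative values only):
#   both_contain("DISTRO_FEATURES", "MACHINE_FEATURES", "systemd", d) -> "systemd"
#   both_contain("DISTRO_FEATURES", "MACHINE_FEATURES", "wifi", d)    -> ""
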
def set_intersect(variable1, variable2, d):
    """
    Expand both variables, interpret them as lists of strings, and return the
    intersection as a flattened string.

    For example:
    s1 = "a b c"
    s2 = "b c d"
    s3 = set_intersect(s1, s2)
    => s3 = "b c"
    """
    val1 = set(d.getVar(variable1).split())
    val2 = set(d.getVar(variable2).split())
    return " ".join(val1 & val2)

def prune_suffix(var, suffixes, d):
    # See if var ends with any of the suffixes listed and
    # remove it if found
    for suffix in suffixes:
        if suffix and var.endswith(suffix):
            var = var[:-len(suffix)]

    prefix = d.getVar("MLPREFIX")
    if prefix and var.startswith(prefix):
        var = var[len(prefix):]

    return var

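# For example, with MLPREFIX = "lib32-" (illustrative values only):
#   prune_suffix("lib32-gcc-cross", ["-cross"], d)  -> "gcc"
#   prune_suffix("gcc-initial", ["-initial"], d)    -> "gcc"
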
def str_filter(f, str, d):
    from re import match
    return " ".join([x for x in str.split() if match(f, x, 0)])

def str_filter_out(f, str, d):
    from re import match
    return " ".join([x for x in str.split() if not match(f, x, 0)])

def build_depends_string(depends, task):
    """Append a taskname to a string of dependencies as used by the [depends] flag"""
    return " ".join(dep + ":" + task for dep in depends.split())

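# For example:
#   build_depends_string("virtual/kernel u-boot", "do_deploy")
#   -> "virtual/kernel:do_deploy u-boot:do_deploy"
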
def inherits(d, *classes):
    """Return True if the metadata inherits any of the specified classes"""
    return any(bb.data.inherits_class(cls, d) for cls in classes)

def features_backfill(var, d):
    # This construct allows the addition of new features to the variable named
    # by var. For example, with var = "DISTRO_FEATURES" it allows new features
    # to be added to DISTRO_FEATURES that, if not present, would disable
    # existing functionality, without disturbing distributions that have
    # already set DISTRO_FEATURES. Distributions wanting to elide a value in
    # DISTRO_FEATURES_BACKFILL should add the feature to
    # DISTRO_FEATURES_BACKFILL_CONSIDERED instead.
    features = (d.getVar(var) or "").split()
    backfill = (d.getVar(var + "_BACKFILL") or "").split()
    considered = (d.getVar(var + "_BACKFILL_CONSIDERED") or "").split()

    addfeatures = []
    for feature in backfill:
        if feature not in features and feature not in considered:
            addfeatures.append(feature)

    if addfeatures:
        d.appendVar(var, " " + " ".join(addfeatures))

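# For example, assuming these illustrative values:
#   DISTRO_FEATURES                     = "x11 wayland"
#   DISTRO_FEATURES_BACKFILL            = "pulseaudio gobject-introspection-data"
#   DISTRO_FEATURES_BACKFILL_CONSIDERED = "gobject-introspection-data"
# features_backfill("DISTRO_FEATURES", d) appends " pulseaudio", giving
# DISTRO_FEATURES = "x11 wayland pulseaudio".
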
def all_distro_features(d, features, truevalue="1", falsevalue=""):
    """
    Returns truevalue if *all* given features are set in DISTRO_FEATURES,
    else falsevalue. The features can be given as a single string or anything
    that can be turned into a set.

    This is a shorter, more flexible version of
    bb.utils.contains("DISTRO_FEATURES", features, truevalue, falsevalue, d).

    Without explicit true/false values it can be used directly where
    Python expects a boolean:
       if oe.utils.all_distro_features(d, "foo bar"):
           bb.fatal("foo and bar are mutually exclusive DISTRO_FEATURES")

    With just a truevalue, it can be used to include files that are meant to be
    used only when requested via DISTRO_FEATURES:
       require ${@ oe.utils.all_distro_features(d, "foo bar", "foo-and-bar.inc")}
    """
    return bb.utils.contains("DISTRO_FEATURES", features, truevalue, falsevalue, d)

def any_distro_features(d, features, truevalue="1", falsevalue=""):
    """
    Returns truevalue if at least *one* of the given features is set in DISTRO_FEATURES,
    else falsevalue. The features can be given as a single string or anything
    that can be turned into a set.

    This is a shorter, more flexible version of
    bb.utils.contains_any("DISTRO_FEATURES", features, truevalue, falsevalue, d).

    Without explicit true/false values it can be used directly where
    Python expects a boolean:
       if not oe.utils.any_distro_features(d, "foo bar"):
           bb.fatal("foo, bar or both must be set in DISTRO_FEATURES")

    With just a truevalue, it can be used to include files that are meant to be
    used only when requested via DISTRO_FEATURES:
       require ${@ oe.utils.any_distro_features(d, "foo bar", "foo-or-bar.inc")}
    """
    return bb.utils.contains_any("DISTRO_FEATURES", features, truevalue, falsevalue, d)

def parallel_make(d):
    """
    Return the integer value for the number of parallel threads to use when
    building, scraped out of PARALLEL_MAKE. If no parallelization option is
    found, returns None.

    e.g. if PARALLEL_MAKE = "-j 10", this will return 10 as an integer.
    """
    pm = (d.getVar('PARALLEL_MAKE') or '').split()
    # look for '-j' and throw other options (e.g. '-l') away
    while pm:
        opt = pm.pop(0)

        if opt == '-j':
            v = pm.pop(0)
        elif opt.startswith('-j'):
            v = opt[2:].strip()
        else:
            continue

        return int(v)

    return None

def parallel_make_argument(d, fmt, limit=None):
    """
    Helper utility to construct a parallel make argument from the number of
    parallel threads specified in PARALLEL_MAKE.

    Returns the input format string `fmt` where a single '%d' will be expanded
    with the number of parallel threads to use. If `limit` is specified, the
    number of parallel threads will be no larger than it. If no parallelization
    option is found in PARALLEL_MAKE, returns an empty string.

    e.g. if PARALLEL_MAKE = "-j 10", parallel_make_argument(d, "-n %d") will return
    "-n 10"
    """
    v = parallel_make(d)
    if v:
        if limit:
            v = min(limit, v)
        return fmt % v
    return ''

def packages_filter_out_system(d):
    """
    Return a list of packages from PACKAGES with the "system" packages such as
    PN-dbg PN-doc PN-locale-en-gb removed.
    """
    pn = d.getVar('PN')
    blacklist = [pn + suffix for suffix in ('', '-dbg', '-dev', '-doc', '-locale', '-staticdev')]
    localepkg = pn + "-locale-"
    pkgs = []

    for pkg in d.getVar('PACKAGES').split():
        if pkg not in blacklist and localepkg not in pkg:
            pkgs.append(pkg)
    return pkgs

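# For example, with PN = "foo" and
# PACKAGES = "foo foo-dbg foo-dev foo-extra foo-locale-en-gb" (illustrative),
# packages_filter_out_system(d) returns ["foo-extra"].
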
def getstatusoutput(cmd):
    return subprocess.getstatusoutput(cmd)

def trim_version(version, num_parts=2):
    """
    Return just the first <num_parts> of <version>, split by periods. For
    example, trim_version("1.2.3", 2) will return "1.2".
    """
    if type(version) is not str:
        raise TypeError("Version should be a string")
    if num_parts < 1:
        raise ValueError("Cannot split to parts < 1")

    parts = version.split(".")
    trimmed = ".".join(parts[:num_parts])
    return trimmed

def cpu_count():
    # multiprocessing is already imported at module level
    return multiprocessing.cpu_count()

def execute_pre_post_process(d, cmds):
    if cmds is None:
        return

    for cmd in cmds.strip().split(';'):
        cmd = cmd.strip()
        if cmd != '':
            bb.note("Executing %s ..." % cmd)
            bb.build.exec_func(cmd, d)

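# For example, execute_pre_post_process(d, "my_prefunc; my_postfunc") runs each
# named function in turn via bb.build.exec_func() ("my_prefunc"/"my_postfunc"
# are illustrative names; callers typically pass a semicolon-separated command
# list taken from the datastore).
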
# For each item in items, call the function 'target' with item as the first
# argument, extraargs as the other arguments and handle any exceptions in the
# parent thread
def multiprocess_launch(target, items, d, extraargs=None):

    class ProcessLaunch(multiprocessing.Process):
        def __init__(self, *args, **kwargs):
            multiprocessing.Process.__init__(self, *args, **kwargs)
            self._pconn, self._cconn = multiprocessing.Pipe()
            self._exception = None
            self._result = None

        def run(self):
            try:
                ret = self._target(*self._args, **self._kwargs)
                self._cconn.send((None, ret))
            except Exception as e:
                tb = traceback.format_exc()
                self._cconn.send((e, tb))

        def update(self):
            # The child sends an (exception, payload) tuple: the payload is the
            # formatted traceback on error, or the return value on success.
            if self._pconn.poll():
                (e, tb) = self._pconn.recv()
                if e is not None:
                    self._exception = (e, tb)
                else:
                    self._result = tb

        @property
        def exception(self):
            self.update()
            return self._exception

        @property
        def result(self):
            self.update()
            return self._result

    max_process = int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)
    launched = []
    errors = []
    results = []
    items = list(items)
    while (items and not errors) or launched:
        if not errors and items and len(launched) < max_process:
            args = (items.pop(),)
            if extraargs is not None:
                args = args + extraargs
            p = ProcessLaunch(target=target, args=args)
            p.start()
            launched.append(p)
        for q in launched:
            # The finished processes are joined when calling is_alive()
            if not q.is_alive():
                if q.exception:
                    errors.append(q.exception)
                if q.result:
                    results.append(q.result)
                launched.remove(q)
    # Paranoia doesn't hurt
    for p in launched:
        p.join()
    if errors:
        msg = ""
        for (e, tb) in errors:
            msg = msg + str(e) + ": " + str(tb) + "\n"
        bb.fatal("Fatal errors occurred in subprocesses:\n%s" % msg)
    return results

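# Hedged usage sketch with a hypothetical worker function '_compress' and item
# list 'file_list'; each item is passed as the first argument and extraargs are
# appended:
#
#   def _compress(path, d):
#       ...                     # work on one file
#       return path
#
#   compressed = multiprocess_launch(_compress, file_list, d, extraargs=(d,))
#
# Failures in any child are collected and reported via bb.fatal() in the parent.
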
def squashspaces(string):
    import re
    return re.sub(r"\s+", " ", string).strip()

def format_pkg_list(pkg_dict, ret_format=None):
    output = []

    if ret_format == "arch":
        for pkg in sorted(pkg_dict):
            output.append("%s %s" % (pkg, pkg_dict[pkg]["arch"]))
    elif ret_format == "file":
        for pkg in sorted(pkg_dict):
            output.append("%s %s %s" % (pkg, pkg_dict[pkg]["filename"], pkg_dict[pkg]["arch"]))
    elif ret_format == "ver":
        for pkg in sorted(pkg_dict):
            output.append("%s %s %s" % (pkg, pkg_dict[pkg]["arch"], pkg_dict[pkg]["ver"]))
    elif ret_format == "deps":
        for pkg in sorted(pkg_dict):
            for dep in pkg_dict[pkg]["deps"]:
                output.append("%s|%s" % (pkg, dep))
    else:
        for pkg in sorted(pkg_dict):
            output.append(pkg)

    output_str = '\n'.join(output)

    if output_str:
        # make sure last line is newline terminated
        output_str += '\n'

    return output_str

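# For example, given an entry such as (illustrative values)
#   pkg_dict = {"busybox": {"arch": "core2-64", "ver": "1.31.1-r0",
#               "filename": "busybox_1.31.1-r0_core2-64.ipk", "deps": []}}
# format_pkg_list(pkg_dict, "ver") yields "busybox core2-64 1.31.1-r0\n".
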
def host_gcc_version(d, taskcontextonly=False):
    import re, subprocess

    if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
        return

    compiler = d.getVar("BUILD_CC")
    try:
        env = os.environ.copy()
        env["PATH"] = d.getVar("PATH")
        output = subprocess.check_output("%s --version" % compiler, shell=True, env=env).decode("utf-8")
    except subprocess.CalledProcessError as e:
        bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))

    match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
    if not match:
        bb.fatal("Can't get compiler version from %s --version output" % compiler)

    version = match.group(1)
    return "-%s" % version if version in ("4.8", "4.9") else ""

def get_multilib_datastore(variant, d):
    localdata = bb.data.createCopy(d)
    if variant:
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + variant
        localdata.setVar("OVERRIDES", overrides)
        localdata.setVar("MLPREFIX", variant + "-")
    else:
        origdefault = localdata.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL")
        if origdefault:
            localdata.setVar("DEFAULTTUNE", origdefault)
        overrides = localdata.getVar("OVERRIDES", False).split(":")
        overrides = ":".join([x for x in overrides if not x.startswith("virtclass-multilib-")])
        localdata.setVar("OVERRIDES", overrides)
        localdata.setVar("MLPREFIX", "")
    return localdata

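# For example, get_multilib_datastore("lib32", d) returns a copy of d with
# MLPREFIX = "lib32-" and ":virtclass-multilib-lib32" appended to OVERRIDES,
# while get_multilib_datastore("", d) strips any multilib override again
# ("lib32" is a typical multilib variant name, used here for illustration).
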
#
# Python 2.7 doesn't have threaded pools (just multiprocessing)
# so implement a version here
#

from queue import Queue
from threading import Thread

class ThreadedWorker(Thread):
    """Thread executing tasks from a given tasks queue"""
    def __init__(self, tasks, worker_init, worker_end):
        Thread.__init__(self)
        self.tasks = tasks
        self.daemon = True

        self.worker_init = worker_init
        self.worker_end = worker_end

    def run(self):
        from queue import Empty

        if self.worker_init is not None:
            self.worker_init(self)

        while True:
            try:
                func, args, kargs = self.tasks.get(block=False)
            except Empty:
                if self.worker_end is not None:
                    self.worker_end(self)
                break

            try:
                func(self, *args, **kargs)
            except Exception as e:
                print(e)
            finally:
                self.tasks.task_done()

class ThreadedPool:
    """Pool of threads consuming tasks from a queue"""
    def __init__(self, num_workers, num_tasks, worker_init=None,
            worker_end=None):
        self.tasks = Queue(num_tasks)
        self.workers = []

        for _ in range(num_workers):
            worker = ThreadedWorker(self.tasks, worker_init, worker_end)
            self.workers.append(worker)

    def start(self):
        for worker in self.workers:
            worker.start()

    def add_task(self, func, *args, **kargs):
        """Add a task to the queue"""
        self.tasks.put((func, args, kargs))

    def wait_completion(self):
        """Wait for completion of all the tasks in the queue"""
        self.tasks.join()
        for worker in self.workers:
            worker.join()

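# Hedged usage sketch ('do_work' and 'names' are hypothetical; note that the
# worker thread itself is passed as the first argument and tasks must be queued
# before start(), since workers exit once the queue is empty):
#
#   def do_work(worker, name):
#       bb.note("processing %s" % name)
#
#   pool = ThreadedPool(4, len(names))
#   for name in names:
#       pool.add_task(do_work, name)
#   pool.start()
#   pool.wait_completion()
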
def write_ld_so_conf(d):
    # Some utils like prelink may not have the correct target library paths
    # so write an ld.so.conf to help them
    ldsoconf = d.expand("${STAGING_DIR_TARGET}${sysconfdir}/ld.so.conf")
    if os.path.exists(ldsoconf):
        bb.utils.remove(ldsoconf)
    bb.utils.mkdirhier(os.path.dirname(ldsoconf))
    with open(ldsoconf, "w") as f:
        f.write(d.getVar("base_libdir") + '\n')
        f.write(d.getVar("libdir") + '\n')

class ImageQAFailed(bb.build.FuncFailed):
    def __init__(self, description, name=None, logfile=None):
        self.description = description
        self.name = name
        self.logfile = logfile

    def __str__(self):
        msg = 'Function failed: %s' % self.name
        if self.description:
            msg = msg + ' (%s)' % self.description

        return msg