cooker.py

#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2003, 2004 Chris Larson
# Copyright (C) 2003, 2004 Phil Blundell
# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
# Copyright (C) 2005 Holger Hans Peter Freyther
# Copyright (C) 2005 ROAD GmbH
# Copyright (C) 2006 - 2007 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

from __future__ import print_function
import sys, os, glob, os.path, re, time
import atexit
import itertools
import logging
import multiprocessing
import signal
import sre_constants
import threading
from cStringIO import StringIO
from contextlib import closing
import bb
from bb import utils, data, parse, event, cache, providers, taskdata, command, runqueue

logger      = logging.getLogger("BitBake")
collectlog  = logging.getLogger("BitBake.Collection")
buildlog    = logging.getLogger("BitBake.Build")
parselog    = logging.getLogger("BitBake.Parsing")
providerlog = logging.getLogger("BitBake.Provider")

class MultipleMatches(Exception):
    """
    Exception raised when multiple file matches are found
    """

class NothingToBuild(Exception):
    """
    Exception raised when there is nothing to build
    """

class state:
    initial, parsing, running, shutdown, stop = range(5)

#============================================================================#
# BBCooker
#============================================================================#
class BBCooker:
    """
    Manages one bitbake build run
    """

    def __init__(self, configuration, server):
        self.status = None
        self.appendlist = {}

        if server:
            self.server = server.BitBakeServer(self)

        self.configuration = configuration

        self.caches_array = []
        # Currently, only the Image Creator "hob" UI needs an extra cache.
        # A UI declares any extra cache class names it needs in an
        # extraCaches field.
        # TODO: in future, bin/bitbake should pass this information into the
        # cooker instead of the cooker reading it from configuration.ui. Some
        # UI start-up issues need to be addressed at the same time.
        caches_name_array = ['bb.cache:CoreRecipeInfo']
        if configuration.ui:
            try:
                module = __import__('bb.ui', fromlist=[configuration.ui])
                name_array = (getattr(module, configuration.ui)).extraCaches
                for recipeInfoName in name_array:
                    caches_name_array.append(recipeInfoName)
            except ImportError, exc:
                # bb.ui.XXX could not be imported - this is a fatal error
                logger.critical("Unable to import '%s' interface from bb.ui: %s" % (configuration.ui, exc))
                sys.exit("FATAL: Failed to import '%s' interface." % configuration.ui)
            except AttributeError:
                # Not an error: if the UI does not define extraCaches, it
                # simply needs no extra cache fields, so skip it.
                logger.debug("UI '%s' does not require extra cache!" % (configuration.ui))

        # At least CoreRecipeInfo will be loaded, so caches_array will never be
        # empty; this is the only entry point, so no further check is needed.
        for var in caches_name_array:
            try:
                module_name, cache_name = var.split(':')
                module = __import__(module_name, fromlist=(cache_name,))
                self.caches_array.append(getattr(module, cache_name))
            except ImportError, exc:
                logger.critical("Unable to import extra RecipeInfo '%s' from '%s': %s" % (cache_name, module_name, exc))
                sys.exit("FATAL: Failed to import extra cache class '%s'." % cache_name)

        self.configuration.data = bb.data.init()

        if not server:
            bb.data.setVar("BB_WORKERCONTEXT", "1", self.configuration.data)

        bb.data.inheritFromOS(self.configuration.data)

        self.parseConfigurationFiles(self.configuration.file)

        if not self.configuration.cmd:
            self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build"

        bbpkgs = bb.data.getVar('BBPKGS', self.configuration.data, True)
        if bbpkgs and len(self.configuration.pkgs_to_build) == 0:
            self.configuration.pkgs_to_build.extend(bbpkgs.split())

        #
        # Special updated configuration we use for firing events
        #
        self.configuration.event_data = bb.data.createCopy(self.configuration.data)
        bb.data.update_data(self.configuration.event_data)

        # TOSTOP must not be set or our children will hang when they output
        fd = sys.stdout.fileno()
        if os.isatty(fd):
            import termios
            tcattr = termios.tcgetattr(fd)
            if tcattr[3] & termios.TOSTOP:
                buildlog.info("The terminal had the TOSTOP bit set, clearing...")
                tcattr[3] = tcattr[3] & ~termios.TOSTOP
                termios.tcsetattr(fd, termios.TCSANOW, tcattr)

        self.command = bb.command.Command(self)
        self.state = state.initial

    def parseConfiguration(self):
        # Change nice level if we're asked to
        nice = bb.data.getVar("BB_NICE_LEVEL", self.configuration.data, True)
        if nice:
            curnice = os.nice(0)
            nice = int(nice) - curnice
            buildlog.verbose("Renice to %s " % os.nice(nice))

    def parseCommandLine(self):
        # Parse any commandline into actions
        if self.configuration.show_environment:
            self.commandlineAction = None

            if 'world' in self.configuration.pkgs_to_build:
                buildlog.error("'world' is not a valid target for --environment.")
            if 'universe' in self.configuration.pkgs_to_build:
                buildlog.error("'universe' is not a valid target for --environment.")
            elif len(self.configuration.pkgs_to_build) > 1:
                buildlog.error("Only one target can be used with the --environment option.")
            elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0:
                buildlog.error("No target should be used with the --environment and --buildfile options.")
            elif len(self.configuration.pkgs_to_build) > 0:
                self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build]
            else:
                self.commandlineAction = ["showEnvironment", self.configuration.buildfile]
        elif self.configuration.buildfile is not None:
            self.commandlineAction = ["buildFile", self.configuration.buildfile, self.configuration.cmd]
        elif self.configuration.revisions_changed:
            self.commandlineAction = ["compareRevisions"]
        elif self.configuration.show_versions:
            self.commandlineAction = ["showVersions"]
        elif self.configuration.parse_only:
            self.commandlineAction = ["parseFiles"]
        elif self.configuration.dot_graph:
            if self.configuration.pkgs_to_build:
                self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd]
            else:
                self.commandlineAction = None
                buildlog.error("Please specify a package name for dependency graph generation.")
        else:
            if self.configuration.pkgs_to_build:
                self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd]
            else:
                self.commandlineAction = None
                buildlog.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")

    def runCommands(self, server, data, abort):
        """
        Run any queued asynchronous command
        This is done by the idle handler so it runs in true context rather than
        tied to any UI.
        """

        return self.command.runAsyncCommand()

    def showVersions(self):
        # Need files parsed
        self.updateCache()

        pkg_pn = self.status.pkg_pn
        preferred_versions = {}
        latest_versions = {}

        # Sort by priority
        for pn in pkg_pn:
            (last_ver, last_file, pref_ver, pref_file) = bb.providers.findBestProvider(pn, self.configuration.data, self.status)
            preferred_versions[pn] = (pref_ver, pref_file)
            latest_versions[pn] = (last_ver, last_file)

        logger.plain("%-35s %25s %25s", "Package Name", "Latest Version", "Preferred Version")
        logger.plain("%-35s %25s %25s\n", "============", "==============", "=================")

        for p in sorted(pkg_pn):
            pref = preferred_versions[p]
            latest = latest_versions[p]

            prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
            lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]

            if pref == latest:
                prefstr = ""

            logger.plain("%-35s %25s %25s", p, lateststr, prefstr)

    def showEnvironment(self, buildfile = None, pkgs_to_build = []):
        """
        Show the outer or per-package environment
        """
        fn = None
        envdata = None

        if buildfile:
            fn = self.matchFile(buildfile)
        elif len(pkgs_to_build) == 1:
            self.updateCache()

            localdata = data.createCopy(self.configuration.data)
            bb.data.update_data(localdata)
            bb.data.expandKeys(localdata)

            taskdata = bb.taskdata.TaskData(self.configuration.abort)
            taskdata.add_provider(localdata, self.status, pkgs_to_build[0])
            taskdata.add_unresolved(localdata, self.status)

            targetid = taskdata.getbuild_id(pkgs_to_build[0])
            fnid = taskdata.build_targets[targetid][0]
            fn = taskdata.fn_index[fnid]
        else:
            envdata = self.configuration.data

        if fn:
            try:
                envdata = bb.cache.Cache.loadDataFull(fn, self.get_file_appends(fn), self.configuration.data)
            except Exception, e:
                parselog.exception("Unable to read %s", fn)
                raise

        # Emit variables and shell functions
        data.update_data(envdata)
        with closing(StringIO()) as env:
            data.emit_env(env, envdata, True)
            logger.plain(env.getvalue())

        # Emit the metadata which isn't valid shell
        data.expandKeys(envdata)
        for e in envdata.keys():
            if data.getVarFlag(e, 'python', envdata):
                logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))

    def prepareTreeData(self, pkgs_to_build, task):
        """
        Prepare a runqueue and taskdata object for iteration over pkgs_to_build
        """
        # Need files parsed
        self.updateCache()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        pkgs_to_build = self.checkPackages(pkgs_to_build)

        localdata = data.createCopy(self.configuration.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)
        # We set abort to False here to prevent unbuildable targets raising
        # an exception when we're just generating data
        taskdata = bb.taskdata.TaskData(False)

        runlist = []
        for k in pkgs_to_build:
            taskdata.add_provider(localdata, self.status, k)
            runlist.append([k, "do_%s" % task])
        taskdata.add_unresolved(localdata, self.status)

        rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
        rq.rqdata.prepare()

        return taskdata, rq

    def generateDepTreeData(self, pkgs_to_build, task):
        """
        Create a dependency tree of pkgs_to_build, returning the data.
        """
        taskdata, rq = self.prepareTreeData(pkgs_to_build, task)

        seen_fnids = []
        depend_tree = {}
        depend_tree["depends"] = {}
        depend_tree["tdepends"] = {}
        depend_tree["pn"] = {}
        depend_tree["rdepends-pn"] = {}
        depend_tree["packages"] = {}
        depend_tree["rdepends-pkg"] = {}
        depend_tree["rrecs-pkg"] = {}

        for task in xrange(len(rq.rqdata.runq_fnid)):
            taskname = rq.rqdata.runq_task[task]
            fnid = rq.rqdata.runq_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.status.pkg_fn[fn]
            version = "%s:%s-%s" % self.status.pkg_pepvpr[fn]
            if pn not in depend_tree["pn"]:
                depend_tree["pn"][pn] = {}
                depend_tree["pn"][pn]["filename"] = fn
                depend_tree["pn"][pn]["version"] = version
            for dep in rq.rqdata.runq_depends[task]:
                depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
                deppn = self.status.pkg_fn[depfn]
                dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
                if not dotname in depend_tree["tdepends"]:
                    depend_tree["tdepends"][dotname] = []
                depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
            if fnid not in seen_fnids:
                seen_fnids.append(fnid)
                packages = []

                depend_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    depend_tree["depends"][pn].append(taskdata.build_names_index[dep])

                depend_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])

                rdepends = self.status.rundeps[fn]
                for package in rdepends:
                    depend_tree["rdepends-pkg"][package] = []
                    for rdepend in rdepends[package]:
                        depend_tree["rdepends-pkg"][package].append(rdepend)
                    packages.append(package)

                rrecs = self.status.runrecs[fn]
                for package in rrecs:
                    depend_tree["rrecs-pkg"][package] = []
                    for rdepend in rrecs[package]:
                        depend_tree["rrecs-pkg"][package].append(rdepend)
                    if not package in packages:
                        packages.append(package)

                for package in packages:
                    if package not in depend_tree["packages"]:
                        depend_tree["packages"][package] = {}
                        depend_tree["packages"][package]["pn"] = pn
                        depend_tree["packages"][package]["filename"] = fn
                        depend_tree["packages"][package]["version"] = version

        return depend_tree

    def generateDepTreeEvent(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Generate an event with the result
        """
        depgraph = self.generateDepTreeData(pkgs_to_build, task)
        bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.configuration.data)

    def generateDotGraphFiles(self, pkgs_to_build, task):
        """
        Create a task dependency graph of pkgs_to_build.
        Save the result to a set of .dot files.
        """

        depgraph = self.generateDepTreeData(pkgs_to_build, task)

        # Write a flattened form of the package depends graph, with subpackages
        # of a recipe merged into the main pn
        depends_file = file('pn-depends.dot', 'w')
        print("digraph depends {", file=depends_file)
        for pn in depgraph["pn"]:
            fn = depgraph["pn"][pn]["filename"]
            version = depgraph["pn"][pn]["version"]
            print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
        for pn in depgraph["depends"]:
            for depend in depgraph["depends"][pn]:
                print('"%s" -> "%s"' % (pn, depend), file=depends_file)
        for pn in depgraph["rdepends-pn"]:
            for rdepend in depgraph["rdepends-pn"][pn]:
                print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
        print("}", file=depends_file)
        logger.info("PN dependencies saved to 'pn-depends.dot'")

        depends_file = file('package-depends.dot', 'w')
        print("digraph depends {", file=depends_file)
        for package in depgraph["packages"]:
            pn = depgraph["packages"][package]["pn"]
            fn = depgraph["packages"][package]["filename"]
            version = depgraph["packages"][package]["version"]
            if package == pn:
                print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
            else:
                print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
            for depend in depgraph["depends"][pn]:
                print('"%s" -> "%s"' % (package, depend), file=depends_file)
        for package in depgraph["rdepends-pkg"]:
            for rdepend in depgraph["rdepends-pkg"][package]:
                print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
        for package in depgraph["rrecs-pkg"]:
            for rdepend in depgraph["rrecs-pkg"][package]:
                print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
        print("}", file=depends_file)
        logger.info("Package dependencies saved to 'package-depends.dot'")

        tdepends_file = file('task-depends.dot', 'w')
        print("digraph depends {", file=tdepends_file)
        for task in depgraph["tdepends"]:
            (pn, taskname) = task.rsplit(".", 1)
            fn = depgraph["pn"][pn]["filename"]
            version = depgraph["pn"][pn]["version"]
            print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
            for dep in depgraph["tdepends"][task]:
                print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
        print("}", file=tdepends_file)
        logger.info("Task dependencies saved to 'task-depends.dot'")

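    # Return the priority of the first BBFILE_PATTERN regex matching the file,
    # or 0 if none matches; matching regexes are recorded in 'matched' when given.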
    def calc_bbfile_priority(self, filename, matched = None):
        for _, _, regex, pri in self.status.bbfile_config_priorities:
            if regex.match(filename):
                if matched != None:
                    if not regex in matched:
                        matched.add(regex)
                return pri
        return 0

    def buildDepgraph(self):
        all_depends = self.status.all_depends
        pn_provides = self.status.pn_provides

        localdata = data.createCopy(self.configuration.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)

        # Handle PREFERRED_PROVIDERS
        for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, 1) or "").split():
            try:
                (providee, provider) = p.split(':')
            except:
                providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
                continue
            if providee in self.status.preferred and self.status.preferred[providee] != provider:
                providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.status.preferred[providee])
            self.status.preferred[providee] = provider

        # Calculate priorities for each file
        matched = set()
        for p in self.status.pkg_fn:
            self.status.bbfile_priority[p] = self.calc_bbfile_priority(p, matched)

        # Don't show the warning if the BBFILE_PATTERN did match .bbappend files
        unmatched = set()
        for _, _, regex, pri in self.status.bbfile_config_priorities:
            if not regex in matched:
                unmatched.add(regex)

        def findmatch(regex):
            for bbfile in self.appendlist:
                for append in self.appendlist[bbfile]:
                    if regex.match(append):
                        return True
            return False

        for unmatch in unmatched.copy():
            if findmatch(unmatch):
                unmatched.remove(unmatch)

        for collection, pattern, regex, _ in self.status.bbfile_config_priorities:
            if regex in unmatched:
                collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))

    def findConfigFiles(self, varname):
        """
        Find config files which are appropriate values for varname.
        i.e. MACHINE, DISTRO
        """
        possible = []
        var = varname.lower()

        data = self.configuration.data
        # iterate configs
        bbpaths = bb.data.getVar('BBPATH', data, True).split(':')
        for path in bbpaths:
            confpath = os.path.join(path, "conf", var)
            if os.path.exists(confpath):
                for root, dirs, files in os.walk(confpath):
                    # get all child files, these are appropriate values
                    for f in files:
                        val, sep, end = f.rpartition('.')
                        if end == 'conf':
                            possible.append(val)

        bb.event.fire(bb.event.ConfigFilesFound(var, possible), self.configuration.data)

    def findInheritsClass(self, klass):
        """
        Find all recipes which inherit the specified class
        """
        pkg_list = []

        for pfn in self.status.pkg_fn:
            inherits = self.status.inherits.get(pfn, None)
            if inherits and inherits.count(klass) > 0:
                pkg_list.append(self.status.pkg_fn[pfn])

        return pkg_list

    def generateTargetsTreeData(self, pkgs_to_build, task):
        """
        Create a tree of pkgs_to_build metadata, returning the data.
        """
        taskdata, rq = self.prepareTreeData(pkgs_to_build, task)

        seen_fnids = []
        target_tree = {}
        target_tree["depends"] = {}
        target_tree["pn"] = {}
        target_tree["rdepends-pn"] = {}

        for task in xrange(len(rq.rqdata.runq_fnid)):
            taskname = rq.rqdata.runq_task[task]
            fnid = rq.rqdata.runq_fnid[task]
            fn = taskdata.fn_index[fnid]
            pn = self.status.pkg_fn[fn]
            version = "%s:%s-%s" % self.status.pkg_pepvpr[fn]
            summary = self.status.summary[fn]
            license = self.status.license[fn]
            section = self.status.section[fn]
            if pn not in target_tree["pn"]:
                target_tree["pn"][pn] = {}
                target_tree["pn"][pn]["filename"] = fn
                target_tree["pn"][pn]["version"] = version
                target_tree["pn"][pn]["summary"] = summary
                target_tree["pn"][pn]["license"] = license
                target_tree["pn"][pn]["section"] = section
            if fnid not in seen_fnids:
                seen_fnids.append(fnid)
                packages = []

                target_tree["depends"][pn] = []
                for dep in taskdata.depids[fnid]:
                    target_tree["depends"][pn].append(taskdata.build_names_index[dep])

                target_tree["rdepends-pn"][pn] = []
                for rdep in taskdata.rdepids[fnid]:
                    target_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])

        return target_tree

    def generateTargetsTree(self, klass):
        """
        Generate a dependency tree of buildable targets
        Generate an event with the result
        """
        pkgs = ['world']
        # if inherited_class passed ensure all recipes which inherit the
        # specified class are included in pkgs
        if klass:
            extra_pkgs = self.findInheritsClass(klass)
            pkgs = pkgs + extra_pkgs

        # generate a dependency tree for all our packages
        tree = self.generateTargetsTreeData(pkgs, 'build')
        bb.event.fire(bb.event.TargetsTreeGenerated(tree), self.configuration.data)

    def buildWorldTargetList(self):
        """
        Build package list for "bitbake world"
        """
        all_depends = self.status.all_depends
        pn_provides = self.status.pn_provides
        parselog.debug(1, "collating packages for \"world\"")
        for f in self.status.possible_world:
            terminal = True
            pn = self.status.pkg_fn[f]

            for p in pn_provides[pn]:
                if p.startswith('virtual/'):
                    parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
                    terminal = False
                    break
                for pf in self.status.providers[p]:
                    if self.status.pkg_fn[pf] != pn:
                        parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
                        terminal = False
                        break
            if terminal:
                self.status.world_target.add(pn)

    def interactiveMode(self):
        """Drop off into a shell"""
        try:
            from bb import shell
        except ImportError:
            parselog.exception("Interactive mode not available")
            sys.exit(1)
        else:
            shell.start(self)

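    # Walk upwards from the current directory looking for conf/bblayers.conf;
    # returns its path, or None if no layer configuration is found.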
    def _findLayerConf(self):
        path = os.getcwd()
        while path != "/":
            bblayers = os.path.join(path, "conf", "bblayers.conf")
            if os.path.exists(bblayers):
                return bblayers

            path, _ = os.path.split(path)

    def parseConfigurationFiles(self, files):
        def _parse(f, data, include=False):
            try:
                return bb.parse.handle(f, data, include)
            except (IOError, bb.parse.ParseError) as exc:
                parselog.critical("Unable to parse %s: %s" % (f, exc))
                sys.exit(1)

        data = self.configuration.data
        bb.parse.init_parser(data)
        for f in files:
            data = _parse(f, data)

        layerconf = self._findLayerConf()
        if layerconf:
            parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
            data = _parse(layerconf, data)

            layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()

            data = bb.data.createCopy(data)
            for layer in layers:
                parselog.debug(2, "Adding layer %s", layer)
                bb.data.setVar('LAYERDIR', layer, data)
                data = _parse(os.path.join(layer, "conf", "layer.conf"), data)
                data.expandVarref('LAYERDIR')

            bb.data.delVar('LAYERDIR', data)

        if not data.getVar("BBPATH", True):
            raise SystemExit("The BBPATH variable is not set")

        data = _parse(os.path.join("conf", "bitbake.conf"), data)

        self.configuration.data = data

        # Handle any INHERITs and inherit the base class
        inherits = ["base"] + (bb.data.getVar('INHERIT', self.configuration.data, True) or "").split()
        for inherit in inherits:
            self.configuration.data = _parse(os.path.join('classes', '%s.bbclass' % inherit), self.configuration.data, True)

        # Normally we only register event handlers at the end of parsing .bb files
        # We register any handlers we've found so far here...
        for var in bb.data.getVar('__BBHANDLERS', self.configuration.data) or []:
            bb.event.register(var, bb.data.getVar(var, self.configuration.data))

        if bb.data.getVar("BB_WORKERCONTEXT", self.configuration.data) is None:
            bb.fetch.fetcher_init(self.configuration.data)
        bb.codeparser.parser_cache_init(self.configuration.data)

        bb.parse.init_parser(data)
        bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)

    def handleCollections(self, collections):
        """Handle collections"""
        if collections:
            collection_list = collections.split()
            for c in collection_list:
                regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1)
                if regex == None:
                    parselog.error("BBFILE_PATTERN_%s not defined" % c)
                    continue
                priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1)
                if priority == None:
                    parselog.error("BBFILE_PRIORITY_%s not defined" % c)
                    continue
                try:
                    cre = re.compile(regex)
                except re.error:
                    parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
                    continue
                try:
                    pri = int(priority)
                    self.status.bbfile_config_priorities.append((c, regex, cre, pri))
                except ValueError:
                    parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)

    def buildSetVars(self):
        """
        Setup any variables needed before starting a build
        """
        if not bb.data.getVar("BUILDNAME", self.configuration.data):
            bb.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'), self.configuration.data)
        bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()), self.configuration.data)

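    # If 'bf' names an existing file it is returned as-is; otherwise it is
    # treated as a regular expression and matched against the collected recipes.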
    def matchFiles(self, bf):
        """
        Find the .bb files which match the expression in 'buildfile'.
        """
        if bf.startswith("/") or bf.startswith("../"):
            bf = os.path.abspath(bf)
        filelist, masked = self.collect_bbfiles()
        try:
            os.stat(bf)
            return [bf]
        except OSError:
            regexp = re.compile(bf)
            matches = []
            for f in filelist:
                if regexp.search(f) and os.path.isfile(f):
                    matches.append(f)
            return matches

    def matchFile(self, buildfile):
        """
        Find the .bb file which matches the expression in 'buildfile'.
        Raise an error if multiple files
        """
        matches = self.matchFiles(buildfile)
        if len(matches) != 1:
            parselog.error("Unable to match %s (%s matches found):" % (buildfile, len(matches)))
            for f in matches:
                parselog.error(" %s" % f)
            raise MultipleMatches
        return matches[0]

    def buildFile(self, buildfile, task):
        """
        Build the file matching regexp buildfile
        """

        # Parse the configuration here. We need to do it explicitly here since
        # buildFile() doesn't use the cache
        self.parseConfiguration()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        fn, cls = bb.cache.Cache.virtualfn2realfn(buildfile)
        fn = self.matchFile(fn)

        self.buildSetVars()

        self.status = bb.cache.CacheData(self.caches_array)
        infos = bb.cache.Cache.parse(fn, self.get_file_appends(fn), \
                                     self.configuration.data,
                                     self.caches_array)
        infos = dict(infos)

        fn = bb.cache.Cache.realfn2virtual(fn, cls)
        try:
            maininfo = infos[fn]
        except KeyError:
            bb.fatal("%s does not exist" % fn)

        self.status.add_from_recipeinfo(fn, maininfo)

        # Tweak some variables
        item = maininfo.pn
        self.status.ignored_dependencies = set()
        self.status.bbfile_priority[fn] = 1

        # Remove external dependencies
        self.status.task_deps[fn]['depends'] = {}
        self.status.deps[fn] = []
        self.status.rundeps[fn] = []
        self.status.runrecs[fn] = []

        # Remove stamp for target if force mode active
        if self.configuration.force:
            logger.verbose("Remove stamp %s, %s", task, fn)
            bb.build.del_stamp('do_%s' % task, self.status, fn)

        # Setup taskdata structure
        taskdata = bb.taskdata.TaskData(self.configuration.abort)
        taskdata.add_provider(self.configuration.data, self.status, item)

        buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
        bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.configuration.event_data)

        # Clear locks
        bb.fetch.persistent_database_connection = {}

        # Execute the runqueue
        runlist = [[item, "do_%s" % task]]

        rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)

        def buildFileIdle(server, rq, abort):
            if abort or self.state == state.stop:
                rq.finish_runqueue(True)
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                for fnid in exc.args:
                    buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
                failures += len(exc.args)
                retval = False
            if not retval:
                bb.event.fire(bb.event.BuildCompleted(buildname, item, failures), self.configuration.event_data)
                self.command.finishAsyncCommand()
                return False
            if retval is True:
                return True
            return retval

        self.server.register_idle_function(buildFileIdle, rq)

    def buildTargets(self, targets, task):
        """
        Attempt to build the targets specified
        """

        # Need files parsed
        self.updateCache()

        # If we are told to do the None task then query the default task
        if (task == None):
            task = self.configuration.cmd

        targets = self.checkPackages(targets)

        def buildTargetsIdle(server, rq, abort):
            if abort or self.state == state.stop:
                rq.finish_runqueue(True)
            elif self.state == state.shutdown:
                rq.finish_runqueue(False)
            failures = 0
            try:
                retval = rq.execute_runqueue()
            except runqueue.TaskFailure as exc:
                for fnid in exc.args:
                    buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
                failures += len(exc.args)
                retval = False
            if not retval:
                bb.event.fire(bb.event.BuildCompleted(buildname, targets, failures), self.configuration.event_data)
                self.command.finishAsyncCommand()
                return False
            if retval is True:
                return True
            return retval

        self.buildSetVars()

        buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
        bb.event.fire(bb.event.BuildStarted(buildname, targets), self.configuration.event_data)

        localdata = data.createCopy(self.configuration.data)
        bb.data.update_data(localdata)
        bb.data.expandKeys(localdata)

        taskdata = bb.taskdata.TaskData(self.configuration.abort)

        runlist = []
        for k in targets:
            taskdata.add_provider(localdata, self.status, k)
            runlist.append([k, "do_%s" % task])
        taskdata.add_unresolved(localdata, self.status)

        # Clear locks
        bb.fetch.persistent_database_connection = {}

        rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)

        self.server.register_idle_function(buildTargetsIdle, rq)

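    # Incrementally parse configuration and recipes: returns True while more
    # recipe files remain to be parsed, None once parsing has completed.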
    def updateCache(self):
        if self.state == state.running:
            return

        if self.state != state.parsing:
            self.parseConfiguration()

            # Import Psyco if available and not disabled
            import platform
            if platform.machine() in ['i386', 'i486', 'i586', 'i686']:
                if not self.configuration.disable_psyco:
                    try:
                        import psyco
                    except ImportError:
                        collectlog.info("Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
                    else:
                        psyco.bind(CookerParser.parse_next)
                else:
                    collectlog.info("You have disabled Psyco. This decreases performance.")

            self.status = bb.cache.CacheData(self.caches_array)

            ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or ""
            self.status.ignored_dependencies = set(ignore.split())

            for dep in self.configuration.extra_assume_provided:
                self.status.ignored_dependencies.add(dep)

            self.handleCollections(bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1))

            (filelist, masked) = self.collect_bbfiles()
            bb.data.renameVar("__depends", "__base_depends", self.configuration.data)

            self.parser = CookerParser(self, filelist, masked)
            self.state = state.parsing

        if not self.parser.parse_next():
            collectlog.debug(1, "parsing complete")
            self.buildDepgraph()
            self.state = state.running
            return None

        return True

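    # Expand the special 'world' and 'universe' targets into concrete package lists.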
    def checkPackages(self, pkgs_to_build):
        if len(pkgs_to_build) == 0:
            raise NothingToBuild

        if 'world' in pkgs_to_build:
            self.buildWorldTargetList()
            pkgs_to_build.remove('world')
            for t in self.status.world_target:
                pkgs_to_build.append(t)

        if 'universe' in pkgs_to_build:
            parselog.debug(1, "collating packages for \"universe\"")
            pkgs_to_build.remove('universe')
            for t in self.status.universe_target:
                pkgs_to_build.append(t)

        return pkgs_to_build

    def get_bbfiles(self, path = os.getcwd()):
        """Get list of default .bb files by reading out the current directory"""
        contents = os.listdir(path)
        bbfiles = []
        for f in contents:
            (root, ext) = os.path.splitext(f)
            if ext == ".bb":
                bbfiles.append(os.path.abspath(os.path.join(os.getcwd(), f)))
        return bbfiles

    def find_bbfiles(self, path):
        """Find all the .bb and .bbappend files in a directory"""
        from os.path import join

        found = []
        for dir, dirs, files in os.walk(path):
            for ignored in ('SCCS', 'CVS', '.svn'):
                if ignored in dirs:
                    dirs.remove(ignored)
            found += [join(dir, f) for f in files if (f.endswith('.bb') or f.endswith('.bbappend'))]

        return found

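    # BBFILES entries may be plain files, directories or glob patterns; files
    # matching BBMASK are counted as masked and excluded from the result.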
    def collect_bbfiles(self):
        """Collect all available .bb build files"""
        parsed, cached, skipped, masked = 0, 0, 0, 0

        collectlog.debug(1, "collecting .bb files")

        files = (data.getVar("BBFILES", self.configuration.data, 1) or "").split()
        data.setVar("BBFILES", " ".join(files), self.configuration.data)

        # Sort files by priority
        files.sort(key=lambda fileitem: self.calc_bbfile_priority(fileitem))

        if not len(files):
            files = self.get_bbfiles()

        if not len(files):
            collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
            bb.event.fire(CookerExit(), self.configuration.event_data)

        # Can't use set here as order is important
        newfiles = []
        for f in files:
            if os.path.isdir(f):
                dirfiles = self.find_bbfiles(f)
                for g in dirfiles:
                    if g not in newfiles:
                        newfiles.append(g)
            else:
                globbed = glob.glob(f)
                if not globbed and os.path.exists(f):
                    globbed = [f]
                for g in globbed:
                    if g not in newfiles:
                        newfiles.append(g)

        bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1)

        if bbmask:
            try:
                bbmask_compiled = re.compile(bbmask)
            except sre_constants.error:
                collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
                return list(newfiles), 0

        bbfiles = []
        bbappend = []
        for f in newfiles:
            if bbmask and bbmask_compiled.search(f):
                collectlog.debug(1, "skipping masked file %s", f)
                masked += 1
                continue
            if f.endswith('.bb'):
                bbfiles.append(f)
            elif f.endswith('.bbappend'):
                bbappend.append(f)
            else:
                collectlog.debug(1, "skipping %s: unknown file extension", f)

        # Build a list of .bbappend files for each .bb file
        for f in bbappend:
            base = os.path.basename(f).replace('.bbappend', '.bb')
            if not base in self.appendlist:
                self.appendlist[base] = []
            self.appendlist[base].append(f)

        return (bbfiles, masked)

    def get_file_appends(self, fn):
        """
        Returns a list of .bbappend files to apply to fn
        NB: collect_bbfiles() must have been called prior to this
        """
        f = os.path.basename(fn)
        if f in self.appendlist:
            return self.appendlist[f]
        return []

    def pre_serve(self):
        # Empty the environment. The environment will be populated as
        # necessary from the data store.
        #bb.utils.empty_environment()
        return

    def post_serve(self):
        bb.event.fire(CookerExit(), self.configuration.event_data)

    def shutdown(self):
        self.state = state.shutdown

    def stop(self):
        self.state = state.stop

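# Server entry point: optionally wraps 'func' in a profiler, writing raw data
# to profile.log and processed statistics to profile.log.processed.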
def server_main(cooker, func, *args):
    cooker.pre_serve()

    if cooker.configuration.profile:
        try:
            import cProfile as profile
        except:
            import profile
        prof = profile.Profile()

        ret = profile.Profile.runcall(prof, func, *args)

        prof.dump_stats("profile.log")

        # Redirect stdout to capture profile information
        pout = open('profile.log.processed', 'w')
        so = sys.stdout.fileno()
        orig_so = os.dup(sys.stdout.fileno())
        os.dup2(pout.fileno(), so)

        import pstats
        p = pstats.Stats('profile.log')
        p.sort_stats('time')
        p.print_stats()
        p.print_callers()
        p.sort_stats('cumulative')
        p.print_stats()

        os.dup2(orig_so, so)
        pout.flush()
        pout.close()

        print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")
    else:
        ret = func(*args)

    cooker.post_serve()

    return ret

class CookerExit(bb.event.Event):
    """
    Notify clients of the Cooker shutdown
    """

    def __init__(self):
        bb.event.Event.__init__(self)

class ParsingFailure(Exception):
    def __init__(self, realexception, recipe):
        self.realexception = realexception
        self.recipe = recipe
        Exception.__init__(self, "Failure when parsing %s" % recipe)
        self.args = (realexception, recipe)

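# Worker function run in the multiprocessing pool; parse_file.cfg is injected
# into each worker by CookerParser.start().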
def parse_file(task):
    filename, appends, caches_array = task
    try:
        return True, bb.cache.Cache.parse(filename, appends, parse_file.cfg, caches_array)
    except Exception, exc:
        exc.recipe = filename
        raise exc
    # Need to turn BaseExceptions into Exceptions here so we gracefully shutdown
    # and for example a worker thread doesn't just exit on its own in response to
    # a SystemExit event for example.
    except BaseException, exc:
        raise ParsingFailure(exc, filename)

class CookerParser(object):
    def __init__(self, cooker, filelist, masked):
        self.filelist = filelist
        self.cooker = cooker
        self.cfgdata = cooker.configuration.data

        # Accounting statistics
        self.parsed = 0
        self.cached = 0
        self.error = 0
        self.masked = masked

        self.skipped = 0
        self.virtuals = 0
        self.total = len(filelist)

        self.current = 0
        self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
                                 multiprocessing.cpu_count())

        self.bb_cache = bb.cache.Cache(self.cfgdata, cooker.caches_array)
        self.fromcache = []
        self.willparse = []
        for filename in self.filelist:
            appends = self.cooker.get_file_appends(filename)
            if not self.bb_cache.cacheValid(filename):
                self.willparse.append((filename, appends, cooker.caches_array))
            else:
                self.fromcache.append((filename, appends))
        self.toparse = self.total - len(self.fromcache)
        self.progress_chunk = max(self.toparse / 100, 1)

        self.start()

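    # Start the parser pool: each worker ignores SIGINT (so Ctrl+C is handled by
    # the parent) and registers a finalizer that saves the codeparser cache.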
    def start(self):
        def init(cfg):
            signal.signal(signal.SIGINT, signal.SIG_IGN)
            multiprocessing.util.Finalize(None, bb.codeparser.parser_cache_save, args=(self.cooker.configuration.data, ), exitpriority=1)
            parse_file.cfg = cfg

        bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)

        self.pool = multiprocessing.Pool(self.num_processes, init, [self.cfgdata])
        parsed = self.pool.imap(parse_file, self.willparse)
        self.pool.close()

        self.results = itertools.chain(self.load_cached(), parsed)

    def shutdown(self, clean=True):
        if clean:
            event = bb.event.ParseCompleted(self.cached, self.parsed,
                                            self.skipped, self.masked,
                                            self.virtuals, self.error,
                                            self.total)
            bb.event.fire(event, self.cfgdata)
        else:
            self.pool.terminate()
        self.pool.join()

        sync = threading.Thread(target=self.bb_cache.sync)
        sync.start()
        atexit.register(lambda: sync.join())
        bb.codeparser.parser_cache_savemerge(self.cooker.configuration.data)

    def load_cached(self):
        for filename, appends in self.fromcache:
            cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
            yield not cached, infos

    def parse_next(self):
        try:
            parsed, result = self.results.next()
        except StopIteration:
            self.shutdown()
            return False
        except KeyboardInterrupt:
            self.shutdown(clean=False)
            raise
        except Exception as exc:
            self.shutdown(clean=False)
            bb.fatal('Error parsing %s: %s' % (exc.recipe, exc))

        self.current += 1
        self.virtuals += len(result)
        if parsed:
            self.parsed += 1
            if self.parsed % self.progress_chunk == 0:
                bb.event.fire(bb.event.ParseProgress(self.parsed),
                              self.cfgdata)
        else:
            self.cached += 1

        for virtualfn, info in result:
            if info.skipped:
                self.skipped += 1
            self.bb_cache.add_info(virtualfn, info, self.cooker.status,
                                   parsed=parsed)
        return True

    def reparse(self, filename):
        infos = self.bb_cache.parse(filename,
                                    self.cooker.get_file_appends(filename),
                                    self.cfgdata, self.cooker.caches_array)
        for vfn, info in infos:
            self.cooker.status.add_from_recipeinfo(vfn, info)